Diffstat (limited to 'scripts/global_setting.py')
-rw-r--r--  scripts/global_setting.py  31
1 file changed, 13 insertions(+), 18 deletions(-)
diff --git a/scripts/global_setting.py b/scripts/global_setting.py
index bb035f9..51dfec1 100644
--- a/scripts/global_setting.py
+++ b/scripts/global_setting.py
@@ -3,34 +3,29 @@ from __future__ import division
from __future__ import print_function
-import collections
-import math
-import modeling
-import optimization
+# import collections
+# import math
+# import modeling
+# import optimization
import tokenization
-import six
-import tensorflow as tf
-import os
+# import six
+# import tensorflow as tf
+# import os
-
-
-
-### Global variables
+# Global variables
# GPU number, default: -1, means not used
-CUDA_VISIBLE_DEVICES="2"
+CUDA_VISIBLE_DEVICES = "2"
# Questions to be trained/predicted
-questions = ['Communication Service Name','Max Number of UEs','Data Rate Downlink','Latency','Data Rate Uplink','Resource Sharing Level','Mobility','Area']
+questions = ['Communication Service Name', 'Max Number of UEs', 'Data Rate Downlink', 'Latency', 'Data Rate Uplink', 'Resource Sharing Level', 'Mobility', 'Area']
# Configuration file
-FLAGS_bert_config_file = '/home/run/chinese_L-12_H-768_A-12/bert_config.json'
-FLAGS_vocab_file = '/home/run/chinese_L-12_H-768_A-12/vocab.txt'
-FLAGS_init_checkpoint_squad = '/home/run/chinese_L-12_H-768_A-12/bert_model.ckpt'
+FLAGS_bert_config_file = '/home/run/uncased_L-12_H-768_A-12/bert_config.json'
+FLAGS_vocab_file = '/home/run/uncased_L-12_H-768_A-12/vocab.txt'
+FLAGS_init_checkpoint_squad = '/home/run/uncased_L-12_H-768_A-12/bert_model.ckpt'
max_seq_length = 512
tokenizer_ch = tokenization.FullTokenizer(vocab_file=FLAGS_vocab_file, do_lower_case=True)
-
-
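Not part of this commit: a minimal usage sketch of how these globals are typically consumed downstream, assuming the standard google-research/bert tokenization.FullTokenizer API; the helper name truncate_to_max_seq is hypothetical.

# Sketch (assumption, not from this diff): the GPU id string is usually
# exported via os.environ before TensorFlow is imported, and the tokenizer
# plus max_seq_length bound the per-example input size.
import os

os.environ["CUDA_VISIBLE_DEVICES"] = CUDA_VISIBLE_DEVICES  # e.g. "2"

def truncate_to_max_seq(context_text, question_text):
    """Hypothetical helper: tokenize and clip to the configured limit."""
    ctx_tokens = tokenizer_ch.tokenize(context_text)   # FullTokenizer.tokenize()
    q_tokens = tokenizer_ch.tokenize(question_text)
    # Reserve room for [CLS], [SEP], [SEP] as in SQuAD-style input packing.
    max_ctx = max_seq_length - len(q_tokens) - 3
    return q_tokens, ctx_tokens[:max_ctx]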