"""Global settings shared by the BERT-based slice-intent scripts.

Defines the GPU selection string, the list of communication-service
questions to be trained/predicted, the locations of the pretrained
Chinese BERT model files, the maximum input sequence length, and a
shared tokenizer instance built from that model's vocabulary.

NOTE(review): importing this module has side effects — it reads
``FLAGS_vocab_file`` from disk when constructing ``tokenizer_ch``.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function


import collections
import math
import modeling          # project-local BERT model definition
import optimization      # project-local BERT optimizer utilities
import tokenization      # project-local BERT WordPiece tokenizer
import six
import tensorflow as tf
import os




### Global variables

# GPU number, default: -1, means not used.
# NOTE(review): this is a plain module-level variable; nothing here writes
# it into os.environ, so by itself it does not restrict TensorFlow's GPU
# visibility — presumably a calling script exports it. Confirm against the
# scripts that import this module.
CUDA_VISIBLE_DEVICES="2"

# Questions (network-slice parameters) to be trained/predicted.
questions = ['Communication Service Name','Max Number of UEs','Data Rate Downlink','Latency','Data Rate Uplink','Resource Sharing Level','Mobility','Area']

# Configuration file paths for the pretrained Chinese BERT-Base model
# (chinese_L-12_H-768_A-12: 12 layers, hidden size 768, 12 attention heads).
FLAGS_bert_config_file = '/home/run/chinese_L-12_H-768_A-12/bert_config.json'
FLAGS_vocab_file = '/home/run/chinese_L-12_H-768_A-12/vocab.txt'
FLAGS_init_checkpoint_squad = '/home/run/chinese_L-12_H-768_A-12/bert_model.ckpt'

# Maximum input sequence length (in tokens) fed to the BERT model.
max_seq_length = 512


# Shared tokenizer built from the vocabulary above; lower-cases input.
# Constructed at import time, so importing this module reads the vocab
# file from disk (see module docstring).
tokenizer_ch = tokenization.FullTokenizer(vocab_file=FLAGS_vocab_file, do_lower_case=True)