TiberiuCristianLeon committed
Commit 37d32ba · verified · 1 Parent(s): 7c6021f

Quickmt auto changes

Files changed (1): app.py (+15 -17)
app.py CHANGED
@@ -230,11 +230,13 @@ class Translators:
     def quickmttranslate(model_path, input_text):
         from quickmt import Translator
         # 'auto' auto-detects GPU, set to "cpu" to force CPU inference
-        device = 'gpu' if torch.cuda.is_available() else 'cpu'
-        translator = Translator(str(model_path), device = device)
-        # translation = Translator(f"./quickmt-{self.sl}-{self.tl}/", device="auto", inter_threads=2)
-        # set beam size to 1 for faster speed (but lower quality)
-        translation = translator(input_text, beam_size=5, max_input_length = 512, max_decoding_length = 512)
+        # device = 'gpu' if torch.cuda.is_available() else 'cpu'
+        translator = Translator(str(model_path))
+        # translation = Translator(f"./quickmt-{self.sl}-{self.tl}/", device="auto/cpu", intra_threads=2, inter_threads=2, compute_type="int8")
+        # set beam size to 1 for faster speed (but lower quality) device="auto/cpu/gpu"
+        # Options for compute_type: default, auto, int8, int8_float32, int8_float16, int8_bfloat16, int16, float16, bfloat16, float32
+        # "int8" will work well for inference on CPU and give "int8_float16" or "int8_bfloat16" a try for GPU inference.
+        translation = translator(input_text, device="auto", compute_type="auto", beam_size=5, max_input_length = 512, max_decoding_length = 512)
         # print(model_path, input_text, translation)
         return translation
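For context, this hunk drops the explicit torch.cuda.is_available() check and leaves device and precision selection to quickmt at translation time. A minimal standalone sketch of the new call path, using only the arguments that appear in this hunk; the model directory and input text below are placeholders, not part of the commit:

    # Sketch only: model directory and input text are placeholders.
    from quickmt import Translator

    model_path = "./quickmt-de-en"  # local model directory, e.g. from huggingface.co/quickmt
    translator = Translator(str(model_path))  # no device pinned at construction time

    translation = translator(
        "Hallo Welt, wie geht es dir?",
        device="auto",           # auto-detects GPU; "cpu" forces CPU inference
        compute_type="auto",     # e.g. "int8" on CPU, "int8_float16"/"int8_bfloat16" on GPU
        beam_size=5,             # beam_size=1 is faster but lower quality
        max_input_length=512,
        max_decoding_length=512,
    )
    print(translation)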
 
@@ -257,15 +259,15 @@ class Translators:
         # quickmt_models.sort()
         quickmt_models = ['ar-en', 'bn-en', 'cs-en', 'da-en', 'de-en', 'el-en', 'en-ar', 'en-bn',
                           'en-cs', 'en-da', 'en-de', 'en-el', 'en-es', 'en-fa', 'en-fr', 'en-he',
-                          'en-hi', 'en-hu', 'en-id', 'en-it', 'en-ja', 'en-ko', 'en-lv', 'en-pl',
+                          'en-hi', 'en-hu', 'en-id', 'en-is', 'en-it', 'en-ja', 'en-ko', 'en-lv', 'en-pl',
                           'en-pt', 'en-ro', 'en-ru', 'en-sv', 'en-th', 'en-tr', 'en-ur', 'en-vi',
                           'en-zh', 'es-en', 'fa-en', 'fr-en', 'he-en', 'hi-en', 'hu-en', 'id-en',
-                          'it-en', 'ja-en', 'ko-en', 'lv-en', 'pl-en', 'pt-en', 'ro-en', 'ru-en',
+                          'is-en', 'it-en', 'ja-en', 'ko-en', 'lv-en', 'pl-en', 'pt-en', 'ro-en', 'ru-en',
                           'th-en', 'tr-en', 'ur-en', 'vi-en', 'zh-en']
         # available_languages = list(set([lang for model in quickmt_models for lang in model.split('-')]))
         # available_languages.sort()
         available_languages = ['ar', 'bn', 'cs', 'da', 'de', 'el', 'en', 'es', 'fa', 'fr', 'he',
-                               'hi', 'hu', 'id', 'it', 'ja', 'ko', 'lv', 'pl', 'pt', 'ro', 'ru',
+                               'hi', 'hu', 'id', 'it', 'is', 'ja', 'ko', 'lv', 'pl', 'pt', 'ro', 'ru',
                                'sv', 'th', 'tr', 'ur', 'vi', 'zh']
         # print(quickmt_models, available_languages)
         # Direct translation model
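The commented-out comprehension in this hunk would derive available_languages from quickmt_models automatically, so adding a pair such as 'en-is'/'is-en' would not require editing both hand-maintained lists. A small sketch of that approach (same comprehension as the commented line, plus sorting; the model list here is truncated for illustration):

    quickmt_models = ['ar-en', 'bn-en', 'en-is', 'is-en', 'zh-en']  # truncated for illustration
    available_languages = sorted({lang for pair in quickmt_models for lang in pair.split('-')})
    # ['ar', 'bn', 'en', 'is', 'zh']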
@@ -707,13 +709,9 @@ def get_info(model_name: str, sl: str = None, tl: str = None):
                   'is-en', 'mt-en', 'nn-en', 'sq-en', 'sw-en', 'zh_hant-en']"""
     elif "QUICKMT" in model_name:
         return """[QUICKMT](https://huggingface.co/quickmt)
-                  ['ar-en', 'bn-en', 'cs-en', 'da-en', 'de-en', 'el-en', 'en-ar', 'en-bn',
-                   'en-cs', 'en-da', 'en-de', 'en-el', 'en-es', 'en-fa', 'en-fr', 'en-he',
-                   'en-hi', 'en-hu', 'en-id', 'en-it', 'en-ja', 'en-ko', 'en-lv', 'en-pl',
-                   'en-pt', 'en-ro', 'en-ru', 'en-sv', 'en-th', 'en-tr', 'en-ur', 'en-vi',
-                   'en-zh', 'es-en', 'fa-en', 'fr-en', 'he-en', 'hi-en', 'hu-en', 'id-en',
-                   'it-en', 'ja-en', 'ko-en', 'lv-en', 'pl-en', 'pt-en', 'ro-en', 'ru-en',
-                   'th-en', 'tr-en', 'ur-en', 'vi-en', 'zh-en']"""
+                  ['ar', 'bn', 'cs', 'da', 'de', 'el', 'en', 'es', 'fa', 'fr', 'he',
+                   'hi', 'hu', 'id', 'it', 'is', 'ja', 'ko', 'lv', 'pl', 'pt', 'ro', 'ru',
+                   'sv', 'th', 'tr', 'ur', 'vi', 'zh']"""
     elif model_name == "Google":
         return "Google Translate Online"
     else:
@@ -721,8 +719,8 @@ def get_info(model_name: str, sl: str = None, tl: str = None):
 with gr.Blocks() as interface:
     gr.Markdown("### Machine Text Translation with Gradio API and MCP Server")
-    input_text = gr.Textbox(label="Enter text to translate:", placeholder="Type your text here, maximum 512 tokens", autofocus=True, submit_btn='Translate', max_length=512)
-
+    input_text = gr.Textbox(label="Enter text to translate:", placeholder="Type your text here, maximum 512 tokens",
+                            autofocus=True, submit_btn='Translate', max_length=512)
     with gr.Row(variant="compact"):
         s_language = gr.Dropdown(choices=langs, value = DEFAULTS[0], label="Source language", interactive=True, scale=2)
         t_language = gr.Dropdown(choices=langs, value = DEFAULTS[1], label="Target language", interactive=True, scale=2)
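This last hunk only re-wraps the gr.Textbox call across two lines. As a rough sketch of how that component might be wired up — the translate_text callback, the output component, and the langs/DEFAULTS values shown here are assumptions for illustration, not part of this commit:

    import gradio as gr

    langs = ["German", "English"]      # placeholder; the app builds this list elsewhere
    DEFAULTS = ["German", "English"]   # placeholder defaults

    def translate_text(text, sl, tl):
        # hypothetical stand-in for the app's real translation dispatch
        return f"[{sl} -> {tl}] {text}"

    with gr.Blocks() as interface:
        gr.Markdown("### Machine Text Translation with Gradio API and MCP Server")
        input_text = gr.Textbox(label="Enter text to translate:",
                                placeholder="Type your text here, maximum 512 tokens",
                                autofocus=True, submit_btn='Translate', max_length=512)
        with gr.Row(variant="compact"):
            s_language = gr.Dropdown(choices=langs, value=DEFAULTS[0], label="Source language", interactive=True, scale=2)
            t_language = gr.Dropdown(choices=langs, value=DEFAULTS[1], label="Target language", interactive=True, scale=2)
        output_text = gr.Textbox(label="Translation")
        # Enter or the textbox's built-in Translate button triggers .submit()
        input_text.submit(translate_text, [input_text, s_language, t_language], output_text)

    interface.launch()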
 