try:
    import os
    from huggingface_hub import InferenceClient

    # Let Hugging Face route the request to an available inference provider.
    client = InferenceClient(
        provider="auto",
        api_key=os.environ["HF_TOKEN"],
    )

    # Score the source sentence against each candidate sentence.
    # InferenceClient.sentence_similarity() takes the source sentence and the
    # candidate list as separate arguments rather than a single payload dict.
    result = client.sentence_similarity(
        "That is a happy person",
        other_sentences=[
            "That is a happy dog",
            "That is a very happy person",
            "Today is a sunny day",
        ],
        model="BAAI/bge-m3",
    )

    # Record success so the uploaded log shows the call went through.
    with open('BAAI_bge-m3_2.txt', 'w', encoding='utf-8') as f:
        f.write('Everything was good in BAAI_bge-m3_2.txt')

except Exception as e:
    # On failure, post an alert to Slack that links to the uploaded log file.
    import os
    from slack_sdk import WebClient

    client = WebClient(token=os.environ['SLACK_TOKEN'])
    client.chat_postMessage(
        channel='#exp-slack-alerts',
        text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/BAAI_bge-m3_2.txt|BAAI_bge-m3_2.txt>',
    )

    # Append the executed code and the traceback to the log file.
    with open('BAAI_bge-m3_2.txt', 'a', encoding='utf-8') as f:
        import traceback
        f.write('''```CODE:
import os
from huggingface_hub import InferenceClient

client = InferenceClient(
    provider="auto",
    api_key=os.environ["HF_TOKEN"],
)

result = client.sentence_similarity(
    "That is a happy person",
    other_sentences=[
        "That is a happy dog",
        "That is a very happy person",
        "Today is a sunny day"
    ],
    model="BAAI/bge-m3",
)
```

ERROR:
''')
        traceback.print_exc(file=f)

finally:
    # Upload the log file (success marker or error report) to the dataset repo.
    from huggingface_hub import upload_file

    upload_file(
        path_or_fileobj='BAAI_bge-m3_2.txt',
        repo_id='model-metadata/code_execution_files',
        path_in_repo='BAAI_bge-m3_2.txt',
        repo_type='dataset',
    )