Update main.py
main.py
CHANGED
@@ -4,7 +4,7 @@ from operator import itemgetter
 from collections import Counter
 from langchain_community.document_loaders import PyPDFLoader, TextLoader
 from chainlit.types import AskFileResponse
-from langchain.text_splitter import RecursiveCharacterTextSplitter
+from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter
 from langchain.schema.runnable import Runnable, RunnablePassthrough, RunnableLambda
 from langchain.schema.runnable.config import RunnableConfig
 from langchain_community.embeddings import HuggingFaceEmbeddings
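Only the import line changes in this hunk: `CharacterTextSplitter` is now imported alongside `RecursiveCharacterTextSplitter`. As a quick reference, here is a minimal sketch of how the two splitters are typically used; the chunk size, overlap and sample text are illustrative assumptions, not values taken from main.py.

```python
# Illustrative only: parameters below are assumptions, not settings from main.py.
from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter

text = "Référentiel de compétences.\n" * 200

# CharacterTextSplitter cuts on a single separator.
char_splitter = CharacterTextSplitter(separator="\n", chunk_size=500, chunk_overlap=50)

# RecursiveCharacterTextSplitter tries "\n\n", "\n", " ", "" in turn until chunks fit.
recursive_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)

print(len(char_splitter.split_text(text)), len(recursive_splitter.split_text(text)))
```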
@@ -455,10 +455,10 @@ async def start():
     await cl.Message(f"> Bonjour {welcomeUserStr}").send()

     df_allcompetences = pd.read_csv('./public/referentiel_competence.csv')
-    df_allcompetences = df_allcompetences.sort_values(by=['libelle_competence'])
     df_competences = df_allcompetences[['libelle_competence']].copy()
-
+    df_competences = df_competences.sort_values(by=['libelle_competence'])
     competences_list = df_competences['libelle_competence'].tolist()
+    competences_list.sort()
     competences_list.insert(0, "")
     cl.user_session.set("arraySettingsComp", competences_list)
     settings = await cl.ChatSettings(
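The sort now runs on the single-column copy `df_competences` rather than on `df_allcompetences`, and the extracted list is sorted again before the blank first entry is inserted. A minimal sketch of the resulting ordering, using a small in-memory frame instead of ./public/referentiel_competence.csv:

```python
# Minimal sketch of the new ordering logic; the competence labels are made up.
import pandas as pd

df_allcompetences = pd.DataFrame({"libelle_competence": ["Soudage", "Accueil", "Maintenance"]})

df_competences = df_allcompetences[["libelle_competence"]].copy()
df_competences = df_competences.sort_values(by=["libelle_competence"])

competences_list = df_competences["libelle_competence"].tolist()
competences_list.sort()          # already sorted, kept to mirror the committed code
competences_list.insert(0, "")   # blank entry so the ChatSettings select starts empty

print(competences_list)  # ['', 'Accueil', 'Maintenance', 'Soudage']
```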
@@ -483,7 +483,7 @@ async def start():
     await cl.Message(author="Datapcc : 🌐🌐🌐",content="2️⃣ Puis sélectionnez ou saisissez une compétence ou des objectifs pédagogiques. Et vous êtes prêt!\n\n🔗 Plateforme de feedback et de fil d'activité : https://cloud.getliteral.ai/").send()
     contextChat = cl.user_session.get("contextChatBot")
     if not contextChat:
-        contextChat =
+        contextChat = df_competences.to_string(index = False)
     os.environ['HUGGINGFACEHUB_API_TOKEN'] = os.environ['HUGGINGFACEHUB_API_TOKEN']
     repo_id = "mistralai/Mistral-7B-Instruct-v0.3"

@@ -516,7 +516,7 @@ async def start():

 @literal_client.step(type="run")
 async def construction_NCS(competenceList):
-    context = await contexte(
+    context = await contexte(competenceList)
     emploisST = cl.user_session.get("EmploiST")
     memory = ConversationBufferMemory(return_messages=True)
     ### Mistral Completion ###
@@ -610,17 +610,19 @@ async def recuperation_contexte(getNote):
     return getNote + " :\n" + getContext
 @cl.step(type="retrieval")
 async def contexte(romeListArray):
-    results = await API_FranceTravail(romeListArray)
-
-
-
-
-
-
-
-
-
-    #
+    #results = await API_FranceTravail(romeListArray)
+    results = await creation_liste_code_Rome_et_emplois(romeListArray)
+    results_df = results[1]
+    finals = results_df[['intitule','typeContratLibelle','experienceLibelle','competences','qualitesProfessionnelles','salaire','lieuTravail','formations','description']].copy()
+    finals["lieuTravail"] = finals["lieuTravail"].apply(lambda x: x['libelle']).apply(lambda x: x[0:3]).apply(lambda x: x.strip())
+    emplois = results_df.values.tolist()
+
+    listEmplois = []
+    for i in range(0,len(emplois)):
+        listEmplois.append("\nEmploi : " + emplois[i][0] + "; Contrat : " + emplois[i][1] + "; Compétences professionnelles : " + arrayToString(emplois[i][3]) + "; " + "Salaire : " + listToString(emplois[i][5]) + "; Qualification : " + emplois[i][4] + "; Localisation : " + emplois[i][6] + "; Expérience : " + emplois[i][2] + "; Niveau de qualification : " + listToString(emplois[i][7]) + "; Description de l'emploi : " + listToString(emplois[i][8]))
+    #emplois_list = results[0]
+    #context = emplois_list.replace('[','').replace(']','').replace('{','').replace('}','')
+    context = listEmplois
     ficheMetier = await FicheMetier("https://candidat.francetravail.fr/metierscope/fiche-metier/", romeListArray[0])
     ficheClesMetier = await ChiffresClesMetier("https://dataemploi.francetravail.fr/metier/chiffres-cles/NAT/FR/", romeListArray[0])
     #ficheMetiersCompetencesSavoirs = await Fiche_metier_competences_savoirs(romeListArray[0])
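The new body of `contexte` formats each France Travail offer into one line of text through `arrayToString` and `listToString`, which are defined elsewhere in main.py and are not shown in this diff. The sketch below is only a guess at what such helpers do (flattening the nested lists and dicts returned by the API into plain strings); the sample offer is invented and shaped like one row of `finals` above.

```python
# Hypothetical stand-ins for arrayToString/listToString, which this diff does not show.
def listToString(value):
    """Render a dict, list or scalar offer field as a single string."""
    if isinstance(value, dict):
        return ", ".join(f"{k}: {v}" for k, v in value.items())
    if isinstance(value, list):
        return " | ".join(listToString(v) for v in value)
    return str(value)

def arrayToString(values):
    """Render a list of competence dicts (e.g. [{'libelle': ...}, ...]) as text."""
    return "; ".join(listToString(v) for v in values)

# Invented offer: intitule, contrat, expérience, compétences, qualification, salaire, lieu, formations, description.
offer = ["Soudeur", "CDI", "2 ans", [{"libelle": "Lecture de plan"}], "Ouvrier qualifié",
         {"libelle": "Mensuel de 2000 euros"}, "13", [{"niveauLibelle": "CAP"}], "Atelier de chaudronnerie"]
print("\nEmploi : " + offer[0] + "; Contrat : " + offer[1]
      + "; Compétences professionnelles : " + arrayToString(offer[3])
      + "; Salaire : " + listToString(offer[5]))
```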
@@ -631,83 +633,6 @@ async def contexte(romeListArray):
     #return "Liste des emplois issus de France Travail :\n" + context
     #return "\nMetier secteur contexte au travail :\n" + metierSecteurContexteTravail + "\nListe des emplois issus de France Travail :\n" + context
 @cl.step(type="tool")
-async def Metier_secteur_contexte_travail(codes):
-    payload = {
-        'grant_type': 'client_credentials',
-        'client_id': 'PAR_datalabapc_54c735e1b592af9d016cf0e45f8973082303609fc997f0821a9b308c07995251',
-        'client_secret': 'b968556f8b4bf2c42af42498304bab0d76edcef23ed4723bbd621ae317a6657e',
-        'scope': 'api_rome-metiersv1'
-    }
-    r = requests.post("https://entreprise.pole-emploi.fr/connexion/oauth2/access_token?realm=/partenaire",
-                      headers={"Content-Type":"application/x-www-form-urlencoded"},
-                      data=payload)
-
-    data = "[" + r.content.decode("utf-8") + "]"
-    load_json = json.loads(data)
-
-    token = next(d['access_token'] for d in load_json if d['scope'] == 'api_rome-metiersv1')
-    conn = http.client.HTTPSConnection("api.pole-emploi.io")
-
-    headers = {
-        'Authorization': "Bearer " + token,
-        'Accept': "application/json, */*"
-    }
-    dataset = ''
-    if codes.find(',') != -1:
-        codeArray = codes.split(',')
-        for i in range(0,len(codeArray)):
-            conn.request("GET", "/partenaire/rome-metiers/v1/metiers/metier/" + codeArray[i], headers=headers)
-            res = conn.getresponse()
-            data = res.read()
-            datas = data.decode("utf-8")
-            dataset += str(datas.replace('"','').replace('{','').replace('}','').replace('[','').replace(']','').replace('code','').replace('libelle','').replace(',:',', ').replace('::',':'))
-    else:
-        conn.request("GET", "/partenaire/rome-metiers/v1/metiers/metier/" + codes, headers=headers)
-        res = conn.getresponse()
-        data = res.read()
-        datas = data.decode("utf-8")
-        dataset += str(datas.replace('"','').replace('{','').replace('}','').replace('[','').replace(']','').replace('code','').replace('libelle','').replace(',:',', ').replace('::',':'))
-    return dataset
-
-@cl.step(type="tool")
-async def Fiche_metier_competences_savoirs(codes):
-    payload = {
-        'grant_type': 'client_credentials',
-        'client_id': 'PAR_datalabapc_54c735e1b592af9d016cf0e45f8973082303609fc997f0821a9b308c07995251',
-        'client_secret': 'b968556f8b4bf2c42af42498304bab0d76edcef23ed4723bbd621ae317a6657e',
-        'scope': 'api_rome-fiches-metiersv1'
-    }
-    r = requests.post("https://entreprise.pole-emploi.fr/connexion/oauth2/access_token?realm=/partenaire",
-                      headers={"Content-Type":"application/x-www-form-urlencoded"},
-                      data=payload)
-
-    data = "[" + r.content.decode("utf-8") + "]"
-    load_json = json.loads(data)
-    token = next(d['access_token'] for d in load_json if d['scope'] == 'api_rome-fiches-metiersv1')
-    conn = http.client.HTTPSConnection("api.pole-emploi.io")
-
-    headers = {
-        'Authorization': "Bearer " + token,
-        'Accept': "application/json, */*"
-    }
-    dataset = ''
-    if codes.find(',') != -1:
-        codeArray = codes.split(',')
-        for i in range(0,len(codeArray)):
-            conn.request("GET", "/partenaire/rome-fiches-metiers/v1/fiches-rome/fiche-metier/" + codeArray[i], headers=headers)
-            res = conn.getresponse()
-            data = res.read()
-            datas = data.decode("utf-8")
-            dataset += str(datas.replace('"','').replace('{','').replace('}','').replace('[','').replace(']','').replace('code','').replace('libelle','').replace(',:',', ').replace('::',':'))
-    else:
-        conn.request("GET", "/partenaire/rome-fiches-metiers/v1/fiches-rome/fiche-metier/" + codes, headers=headers)
-        res = conn.getresponse()
-        data = res.read()
-        datas = data.decode("utf-8")
-        dataset += str(datas.replace('"','').replace('{','').replace('}','').replace('[','').replace(']','').replace('code','').replace('libelle','').replace(',:',', ').replace('::',':'))
-    return dataset
-
-@cl.step(type="tool")
 async def FicheMetier(url, codes):
     if codes.find(',') != -1:
         all = ""
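The two helpers removed above requested OAuth client-credentials tokens from the France Travail (ex Pôle emploi) authorization endpoint with hard-coded `client_id`/`client_secret` literals. For reference, here is a sketch of the same token request with the credentials read from the environment; reusing the `POLE_EMPLOI_CLIENT_ID`/`POLE_EMPLOI_CLIENT_SECRET` variables already present elsewhere in main.py is an assumption, not something this commit does.

```python
# Sketch of the client-credentials request the removed helpers performed,
# with secrets taken from the environment instead of hard-coded literals.
import os
import requests

def get_francetravail_token(scope: str) -> str:
    payload = {
        "grant_type": "client_credentials",
        "client_id": os.environ["POLE_EMPLOI_CLIENT_ID"],         # assumed variable name
        "client_secret": os.environ["POLE_EMPLOI_CLIENT_SECRET"], # assumed variable name
        "scope": scope,
    }
    r = requests.post(
        "https://entreprise.pole-emploi.fr/connexion/oauth2/access_token?realm=/partenaire",
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        data=payload,
    )
    r.raise_for_status()
    return r.json()["access_token"]

# token = get_francetravail_token("api_rome-metiersv1")
```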
@@ -824,59 +749,88 @@ async def ChiffresClesMetier(url, codes):
     return all

 @cl.step(type="tool")
-async def
-
-
-
-
-
-
-
-
-
-
-
-
-
+async def creation_liste_code_Rome_et_emplois(competence):
+    os.environ['PINECONE_API_KEYROME'] = os.environ['PINECONE_API_KEYROME']
+    docsearch = await connexion_catalogue_Rome()
+    retrieve_comp = docsearch.similarity_search(competence, k=30, filter={"categorie": {"$eq": os.environ['PINECONE_API_KEYROME']}})
+    retrieve = pd.DataFrame(retrieve_comp)
+    codeRome = []
+    competence = []
+    metier = []
+    for i in range(0,len(retrieve_comp)):
+        codeRome.append(retrieve_comp[i].metadata['code_rome'])
+        competence.append(retrieve_comp[i].metadata['libelle_competence'])
+        metier.append(retrieve_comp[i].metadata['libelle_appellation_long'])
+
+    results_df = pd.DataFrame({'codeRome': codeRome,'competence': competence, 'metier': metier})
+    arrayresults = results_df.values.tolist()
+    displayresults = '<table border="1" cellpadding="0" cellspacing="0"><tr><td>Code Rome</td><td>Compétence</td><td>Métier</td></tr>'
+    for j in range(0, len(arrayresults)):
+        displayresults += '<tr><td>' + arrayresults[j][0] + '</td><td>' + arrayresults[j][1] + '</td><td>' + arrayresults[j][2] + '</td></tr>'
+
+    displayresults += '</table>'
+    await cl.Message(author="Datapcc : 🌐🌐🌐",content="<p>Voici le résultat de la recherche sémantique sur le catalogue Rome :</p>" + displayresults).send()
+
+    results_df = results_df.drop_duplicates(subset=["codeRome"])
+    results_df = results_df.head(5)
+    codeRomeString = results_df["codeRome"].to_string(index = False)
+    codeRome_list = results_df["codeRome"].tolist()
+    actionRome = await cl.AskActionMessage(
+        content="Etes-vous d'accord avec la sélection des 5 codes Rome automatiques issus de la recherche sémantique ? :" + codeRomeString,
+        actions=[
+            cl.Action(name="continue", value="continue", label="✅ Oui, je veux continuer vers l'extraction en temps réel des offres d'emploi"),
+            cl.Action(name="cancel", value="cancel", label="❌ Non, je veux saisir ma liste de codes Rome, séparés par des virgules"),
+        ],
+    ).send()
+    if actionRome and actionRome.get("value") == "continue":
+        await cl.Message(
+            content="Connexion à France Travail, et récupération des offres d'emploi",
+        ).send()
+        df_emplois = await API_France_Travail(codeRome_list)
+        cl.user_session.set("codeRomeArray", codeRome_list)
     else:
-
-    if
-
-
-
-
-
+        actionsaisierome = await cl.AskUserMessage(content="Saisissez vos codes Rome dans le prompt? ⚠️ Attention, indiquez seulement des codes Rome séparés par des virgules", timeout=3600).send()
+        if actionsaisierome:
+            await cl.Message(
+                content=f"Votre saisie est : {actionsaisierome['output']}",
+            ).send()
+            stringCodeRome = actionsaisierome['output'].replace(' ','')
+            stopWords = [';','.',':','!','|']
+            teststringCodeRome = [ele for ele in stopWords if(ele in stringCodeRome)]
+            teststringCodeRome = bool(teststringCodeRome)
+            if teststringCodeRome == False:
+                arrayCodeRome = stringCodeRome.split(',')
+            else:
+                arrayCodeRome = codeRome_list
+                await cl.Message(author="Datapcc : 🌐🌐🌐",content="Votre saisie est erronée. Nous continuons l'action avec les codes Rome sélectionnés automatiquement pour vous : " + codeRomeString).send()
+            df_emplois = await API_France_Travail(arrayCodeRome)
+            cl.user_session.set("codeRomeArray", arrayCodeRome)
+
+    return df_emplois

 @cl.step(type="tool")
-async def
+async def connexion_catalogue_Rome():
     os.environ['PINECONE_API_KEY'] = os.environ['PINECONE_API_KEY']
     os.environ['PINECONE_INDEX_NAME'] = os.environ['PINECONE_INDEX_NAME']
-    os.environ['PINECONE_ENVIRONMENT'] = os.environ['PINECONE_ENVIRONMENT']
     embeddings = HuggingFaceEmbeddings()
-    docsearch = PineconeVectorStore(
+    docsearch = PineconeVectorStore.from_existing_index(os.environ['PINECONE_INDEX_NAME'], embeddings)
     return docsearch

 @cl.step(type="tool")
-async def
-    os.environ['PINECONE_API_KEYROME'] = os.environ['PINECONE_API_KEYROME']
-    pc = Pinecone(api_key=os.environ['PINECONE_API_KEYROME'])
-    index_name = "all-skills"
-    index = pc.Index(index_name)
-
-    return index
-
-@cl.step(type="tool")
-async def API_FranceTravail(romeListArray):
+async def API_France_Travail(romeListArray):
     client = Api(client_id=os.environ['POLE_EMPLOI_CLIENT_ID'],
                  client_secret=os.environ['POLE_EMPLOI_CLIENT_SECRET'])
     todayDate = datetime.datetime.today()
     month, year = (todayDate.month-1, todayDate.year) if todayDate.month != 1 else (12, todayDate.year-1)
     start_dt = todayDate.replace(day=1, month=month, year=year)
     end_dt = datetime.datetime.today()
-
-
-
-
+    results = []
+    for k in romeListArray:
+        params = {"motsCles": k,'minCreationDate': dt_to_str_iso(start_dt),'maxCreationDate': dt_to_str_iso(end_dt),'range':'0-149'}
+        search_on_big_data = client.search(params=params)
+        results += search_on_big_data["resultats"]
+    results_df = pd.DataFrame(results)
+    return [results, results_df]

 @cl.step(type="llm")
 async def IA():
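The new `creation_liste_code_Rome_et_emplois` step retrieves ROME codes by running a filtered similarity search against an existing Pinecone index through LangChain, then asks the user to confirm the top five codes before extracting offers. A minimal sketch of that retrieval pattern follows; the query text and the `categorie` filter value are placeholders, and the `langchain_pinecone` import is assumed, since the import section is not part of this diff.

```python
# Sketch of the filtered retrieval used above; index name, filter value and query are placeholders.
import os
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_pinecone import PineconeVectorStore  # assumed source of PineconeVectorStore

embeddings = HuggingFaceEmbeddings()
docsearch = PineconeVectorStore.from_existing_index(os.environ["PINECONE_INDEX_NAME"], embeddings)

retrieve_comp = docsearch.similarity_search(
    "réaliser des soudures selon un cahier des charges",  # competence typed or selected by the user
    k=30,
    filter={"categorie": {"$eq": "rome"}},                # placeholder category value
)
for doc in retrieve_comp[:5]:
    print(doc.metadata["code_rome"],
          doc.metadata["libelle_competence"],
          doc.metadata["libelle_appellation_long"])
```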
@@ -919,7 +873,6 @@ async def setup_agent(settings):
         contextChat = "Il n'y a pas de contexte."

     os.environ['HUGGINGFACEHUB_API_TOKEN'] = os.environ['HUGGINGFACEHUB_API_TOKEN']
-    #repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
     repo_id = "mistralai/Mistral-7B-Instruct-v0.3"

     model = HuggingFaceEndpoint(