Jobanpreet committed on
Commit 7df88e6
1 Parent(s): e9081a1

code updated

.env ADDED
@@ -0,0 +1 @@
+ cohere_api_key="CTx2vcy4vjHeGC77cGGQXrCCz4xZYxlnPyFWZqqe"
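The commit adds the Cohere key as a plain `.env` entry. Below is a minimal sketch (not part of the commit) of how such a value is typically read at runtime, assuming python-dotenv is used; the variable name matches the line added above.

# Sketch only: assumes python-dotenv; the repository may load the key differently.
import os

from dotenv import load_dotenv

load_dotenv()  # reads .env from the working directory into the process environment
cohere_api_key = os.getenv("cohere_api_key")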
__pycache__/advance_post.cpython-310.pyc CHANGED
Binary files a/__pycache__/advance_post.cpython-310.pyc and b/__pycache__/advance_post.cpython-310.pyc differ
 
__pycache__/blog_post.cpython-310.pyc CHANGED
Binary files a/__pycache__/blog_post.cpython-310.pyc and b/__pycache__/blog_post.cpython-310.pyc differ
 
__pycache__/paraphrase_post.cpython-310.pyc CHANGED
Binary files a/__pycache__/paraphrase_post.cpython-310.pyc and b/__pycache__/paraphrase_post.cpython-310.pyc differ
 
__pycache__/scrap_post.cpython-310.pyc CHANGED
Binary files a/__pycache__/scrap_post.cpython-310.pyc and b/__pycache__/scrap_post.cpython-310.pyc differ
 
__pycache__/streaming_response.cpython-310.pyc CHANGED
Binary files a/__pycache__/streaming_response.cpython-310.pyc and b/__pycache__/streaming_response.cpython-310.pyc differ
 
app.py CHANGED
@@ -40,18 +40,17 @@ elif temperature=="High accuracy":
 temperature=0
 temperature=temperature/10
 
- # stream_handler = StreamHandler(st.empty())
+
 
 if option=="GPT-4o":
 api_key=st.sidebar.text_input("API Key:",placeholder="Enter OpenAI API Key...")
 if api_key:
- stream_handler = StreamHandler(st.empty())
- model=ChatOpenAI(model="gpt-4o" , temperature=temperature , api_key=api_key , streaming=True, callbacks=[stream_handler])
+
+ model=ChatOpenAI(model="gpt-4o" , temperature=temperature , api_key=api_key , streaming=True, callbacks=[StreamHandler(st.empty())])
 elif option=="Cohere":
 api_key= st.sidebar.text_input("API Key:",placeholder="Enter Cohere API Key...")
 if api_key:
- stream_handler = StreamHandler(st.empty())
- model = ChatCohere(cohere_api_key=api_key,temperature=temperature , streaming=True, callbacks=[stream_handler])
+ model = ChatCohere(cohere_api_key=api_key,temperature=temperature , streaming=True, callbacks=[StreamHandler(st.empty())])
 #model = ChatCohere(cohere_api_key=api_key , temperature = temperature , model = "command-r-plus")
 # elif option=="Mixtral-8x7b":
 # api_key=st.sidebar.text_input("API Key:",placeholder="Enter NVIDIA NIM API Key...")
@@ -67,7 +66,6 @@ elif option=="Cohere":
 # model=HuggingFaceHub(repo_id="mistralai/Mixtral-8x22B-Instruct-v0.1",huggingfacehub_api_token=api_key ,model_kwargs={"temperature":temperature})
 
 
-
 if st.sidebar.toggle("Normal LinkedIn Post"):
 url = st.sidebar.text_input("Enter URL:", placeholder="Enter URL here...")
 if st.sidebar.button("Submit"):
@@ -93,31 +91,6 @@ if st.sidebar.toggle("Normal LinkedIn Post"):
 
 
 
- # paraphrase_text=st.text_area("Generated LinkedIn post",value=session_state.paraphrase, height=400)
- # import pyperclip
- # if st.button('Copy'):
- # pyperclip.copy(paraphrase_text)
- # st.success('Text copied successfully!')
- # from paraphrase_post import extract_data
- # # session_state.keywords , session_state.take_aways , session_state.highlights =extract_data(session_state.paraphrase , model)
- # from paraphrase_post import generate_details
- # if st.sidebar.button("Show Details"):
- # session_state.keywords =generate_details(session_state.paraphrase , model)
- # session_state.take_aways =generate_takeaways(session_state.paraphrase , model)
- # session_state.highlights =generate_highlights(session_state.paraphrase , model)
-
- # st.write("Keywords:")
- # for i, statement in enumerate(session_state.keywords, start=1):
- # st.write(f"{i}. {statement}")
-
- # st.write("Take Aways:")
- # for i, statement in enumerate(session_state.take_aways, start=1):
- # st.write(f"{i}. {statement}")
-
- # st.write("Highlights:")
- # for i, statement in enumerate(session_state.highlights, start=1):
- # st.write(f"{i}. {statement}")
-
 #------------------------------------------------------------Advance LinkedIn post code below-----------------------------------------------------------------
 
 
@@ -141,14 +114,14 @@ if st.sidebar.toggle("Advance LinkedIn Post"):
 
 if match:
 try:
- session_state.advancepost =google_search(url ,model , google_api_key,search_engine_id)
- # session_state.advancepost = advanced_post(all_links ,model ,session_state.paraphrase)
+ with st.spinner("Creating post ,please wait..."):
+ session_state.advancepost =google_search(url ,model , google_api_key,search_engine_id)
+
 except (openai.AuthenticationError) as e:
 st.sidebar.error("Enter your valid API key")
 else:
 st.sidebar.error("Put a valid LinkedIn post url only")
- # if len(docs)==0:
- # st.sidebar.error("Please Check your both credentials carefully")
+
 
 else:
 st.sidebar.error("Please enter Search Engine ID")
@@ -159,7 +132,7 @@ if st.sidebar.toggle("Advance LinkedIn Post"):
 else:
 st.sidebar.error("Please enter url")
 
- advance_post=st.text_area("Advance LinkedIn post",value=session_state.advancepost, height=400)
+ advance_post=st.write(session_state.advancepost)
 
 
 
@@ -170,9 +143,6 @@ if st.sidebar.toggle("Message"):
 if st.sidebar.button("Generate Post"):
 if content:
 if api_key:
- # with st.container(height=500):
- # stream_handler = StreamHandler(st.empty())
- # model = ChatCohere(cohere_api_key=api_key,temperature=temperature , streaming=True, callbacks=[stream_handler])
 post_from_content(model ,content)
 
 
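The substantive change in app.py is that the StreamHandler callback is now constructed inline when each chat model is built, and the Google-search step runs under st.spinner. Below is a minimal, self-contained sketch (not part of the commit) of that streaming wiring: the handler body is assumed from the fragment visible in streaming_response.py, and the prompt box around it is illustrative only.

# Sketch only: mirrors the callbacks=[StreamHandler(st.empty())] pattern added in
# this commit; the handler is assumed from the fragment visible in
# streaming_response.py, and the surrounding UI is illustrative.
import streamlit as st
from langchain_core.callbacks import BaseCallbackHandler
from langchain_openai import ChatOpenAI


class StreamHandler(BaseCallbackHandler):
    """Append each streamed token to a Streamlit placeholder."""

    def __init__(self, container):
        self.container = container
        self.text = ""

    def on_llm_new_token(self, token: str, **kwargs) -> None:
        self.text += token
        self.container.markdown(self.text)


api_key = st.sidebar.text_input("API Key:", type="password")
prompt = st.text_input("Prompt:")

if api_key and prompt:
    # A fresh placeholder is created per run and passed directly in callbacks,
    # as the updated app.py now does for both ChatOpenAI and ChatCohere.
    model = ChatOpenAI(
        model="gpt-4o",
        temperature=0.2,
        api_key=api_key,
        streaming=True,
        callbacks=[StreamHandler(st.empty())],
    )
    with st.spinner("Creating post, please wait..."):
        model.invoke(prompt)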
 
paraphrase_post.py CHANGED
@@ -70,43 +70,3 @@ def generate_details(post_data,model):
 keywords=chain.invoke({"data":post_data})
 return keywords
 
-
-
-
-
- # # Below function extract the details such as keywords , Take aways , highlights and questions
- # def extract_data(post_data ,model):
- # keywords = ResponseSchema(name="Keywords",
- # description="These are the keywords extracted from LinkedIn post",type="list")
-
- # # Take_aways = ResponseSchema(name="Take Aways",
- # # description="These are the take aways extracted from LinkedIn post", type= "list")
- # # Highlights=ResponseSchema(name="Highlights",
- # # description="These are the highlights extracted from LinkedIn post", type= "list")
-
-
- # response_schema = [
- # keywords,
- # # Take_aways,
- # # Highlights
-
- # ]
- # output_parser = StructuredOutputParser.from_response_schemas(response_schema)
- # format_instructions = output_parser.get_format_instructions()
-
- # template = """
- # You are a helpful keywords extractor from the post of LinkedIn Bot. Your task is to extract relevant keywords in descending order of their scores in a list, means high relevant should be on the top .
- # From the following text message, extract the following information:
-
- # text message: {content}
- # {format_instructions}
- # """
-
- # prompt_template = ChatPromptTemplate.from_template(template)
- # messages = prompt_template.format_messages(content=post_data, format_instructions=format_instructions)
- # response = model(messages)
- # output_dict= output_parser.parse(response.content)
- # keywords=output_dict['Keywords'][:3]
- # # take_aways=output_dict['Take Aways'][:3]
- # # highlights=output_dict['Highlights'][:3]
- # return keywords
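Only the tail of generate_details is visible in this hunk (chain.invoke({"data": post_data}) and the return), so the following is a hedged reconstruction of what such a keyword-extraction chain might look like; the prompt text and the CommaSeparatedListOutputParser are assumptions, not code from the repository.

# Hypothetical reconstruction for illustration; only chain.invoke({"data": post_data})
# and "return keywords" appear in the diff above.
from langchain_core.output_parsers import CommaSeparatedListOutputParser
from langchain_core.prompts import ChatPromptTemplate


def generate_details(post_data, model):
    # Build an LCEL chain: prompt -> chat model -> list parser.
    prompt = ChatPromptTemplate.from_template(
        "Extract the most relevant keywords from this LinkedIn post, "
        "most relevant first, as a comma-separated list:\n\n{data}"
    )
    chain = prompt | model | CommaSeparatedListOutputParser()
    keywords = chain.invoke({"data": post_data})
    return keywords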
streaming_response.py CHANGED
@@ -11,26 +11,3 @@ class StreamHandler(BaseCallbackHandler):
 self.container.markdown(self.text)
 
 
- # with st.sidebar:
- # openai_api_key = st.text_input("OpenAI API Key", type="password")
-
- # if "messages" not in st.session_state:
- # st.session_state["messages"] = [ChatMessage(role="assistant", content="How can I help you?")]
-
- # for msg in st.session_state.messages:
- # st.chat_message(msg.role).write(msg.content)
-
- # if prompt := st.chat_input():
- # st.session_state.messages.append(ChatMessage(role="user", content=prompt))
- # st.chat_message("user").write(prompt)
-
- # if not openai_api_key:
- # st.info("Please add your OpenAI API key to continue.")
- # st.stop()
-
- # with st.chat_message("assistant"):
- # stream_handler = StreamHandler(st.empty())
- # # llm = ChatOpenAI(openai_api_key=openai_api_key, streaming=True, callbacks=[stream_handler])
- # llm = ChatCohere(openai_api_key=openai_api_key, streaming=True, callbacks=[stream_handler])
- # response = llm.invoke(st.session_state.messages)
- # st.session_state.messages.append(ChatMessage(role="assistant", content=response.content))