ABAO77 committed (verified)
Commit d62d2dd · Parent: b4c9cb7

Upload 67 files

src/agents/agent_transcript/__pycache__/flow.cpython-311.pyc CHANGED
Binary files a/src/agents/agent_transcript/__pycache__/flow.cpython-311.pyc and b/src/agents/agent_transcript/__pycache__/flow.cpython-311.pyc differ
 
src/agents/agent_transcript/flow.py CHANGED
```diff
@@ -19,6 +19,13 @@ class AgentTranscript:
     def __init__(self):
         self.builder = StateGraph(State)
 
+    @staticmethod
+    def check_existed_comment(state: State):
+        if not state["comment"]:
+            return "scientific_fact_finder"
+        else:
+            return "comment_insight_extractor"
+
     def node(self):
         self.builder.add_node("trim_history", trim_history)
         self.builder.add_node(
@@ -38,30 +45,37 @@ class AgentTranscript:
         self.builder.add_edge(
             "extract_transcript_and_comment", "script_structure_analyzer"
         )
-        self.builder.add_edge("script_structure_analyzer", "comment_insight_extractor")
+        self.builder.add_conditional_edges(
+            "script_structure_analyzer",
+            self.check_existed_comment,
+            {
+                "scientific_fact_finder": "scientific_fact_finder",
+                "comment_insight_extractor": "comment_insight_extractor",
+            },
+        )
         self.builder.add_edge("comment_insight_extractor", "scientific_fact_finder")
         self.builder.add_edge("scientific_fact_finder", "script_re_outline")
         self.builder.add_edge("script_re_outline", "script_writer_init")
-
+
         # Conditional routing for script writing
         self.builder.add_conditional_edges(
             "script_writer_init",
             should_continue_writing,
             {
                 "script_writer_single": "script_writer_single",
-                "script_writer_end": "script_writer_end"
-            }
+                "script_writer_end": "script_writer_end",
+            },
         )
-
+
         self.builder.add_conditional_edges(
             "script_writer_single",
             should_continue_writing,
             {
                 "script_writer_single": "script_writer_single",
-                "script_writer_end": "script_writer_end"
-            }
+                "script_writer_end": "script_writer_end",
+            },
         )
-
+
         self.builder.add_edge("script_writer_end", END)
 
     def __call__(self) -> CompiledStateGraph:
```
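The net effect of this change: `script_structure_analyzer` no longer flows unconditionally into `comment_insight_extractor`. The new `check_existed_comment` router inspects `state["comment"]` and, when it is empty, jumps straight to `scientific_fact_finder`, so videos without usable comments skip comment analysis entirely. A minimal, self-contained sketch of the same conditional-edge pattern, assuming a recent `langgraph` release; the `State` fields and stand-in node bodies here are hypothetical, and only the node names and routing come from the diff:

```python
from typing import TypedDict

from langgraph.graph import END, START, StateGraph


class State(TypedDict):
    comment: str
    visited: list[str]  # hypothetical field, used only to trace the route taken


def check_existed_comment(state: State) -> str:
    # Empty comment -> bypass comment analysis entirely (as in the diff).
    if not state["comment"]:
        return "scientific_fact_finder"
    return "comment_insight_extractor"


def make_node(name: str):
    # Stand-in node body: just record that this node ran.
    return lambda state: {"visited": state["visited"] + [name]}


builder = StateGraph(State)
builder.add_node("script_structure_analyzer", make_node("script_structure_analyzer"))
builder.add_node("comment_insight_extractor", make_node("comment_insight_extractor"))
builder.add_node("scientific_fact_finder", make_node("scientific_fact_finder"))
builder.add_edge(START, "script_structure_analyzer")
builder.add_conditional_edges(
    "script_structure_analyzer",
    check_existed_comment,
    {
        "scientific_fact_finder": "scientific_fact_finder",
        "comment_insight_extractor": "comment_insight_extractor",
    },
)
builder.add_edge("comment_insight_extractor", "scientific_fact_finder")
builder.add_edge("scientific_fact_finder", END)
graph = builder.compile()

print(graph.invoke({"comment": "", "visited": []})["visited"])
# ['script_structure_analyzer', 'scientific_fact_finder']
print(graph.invoke({"comment": "great video", "visited": []})["visited"])
# ['script_structure_analyzer', 'comment_insight_extractor', 'scientific_fact_finder']
```

Note that the router indexes `state["comment"]` directly, so it relies on the upstream extraction step always writing that key — which the fail-soft change to `extract_comment` in `src/utils/helper.py` below now guarantees, even on failure.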
src/apis/routers/__pycache__/gen_script.cpython-311.pyc CHANGED
Binary files a/src/apis/routers/__pycache__/gen_script.cpython-311.pyc and b/src/apis/routers/__pycache__/gen_script.cpython-311.pyc differ
 
src/apis/routers/gen_script.py CHANGED
```diff
@@ -28,121 +28,114 @@ async def message_generator(
     input_graph: dict,
     config: RunnableConfig,
 ):
-    try:
-        last_output_state = None
-
-        try:
-            async for event in script_writer_agent.astream(
-                input=input_graph, stream_mode=["messages", "values"], config=config
+    # try:
+    last_output_state = None
+
+    # try:
+    async for event in script_writer_agent.astream(
+        input=input_graph, stream_mode=["messages", "values"], config=config
+    ):
+        # try:
+        event_type, event_message = event
+        logger.info(f"Event type: {event_type}")
+
+        if event_type == "messages":
+            message, metadata = event_message
+            if isinstance(message, AIMessageChunk):
+                # Stream AI message chunks
+                node = metadata.get("node")
+                chunk_data = {
+                    "type": "message_chunk",
+                    "content": message.content,
+                    "metadata": metadata,
+                    "node_step": node,
+                }
+                logger.info(f"Chunk data: {chunk_data}")
+                yield f"data: {json.dumps(chunk_data)}\n\n"
+
+        elif event_type == "values":
+            # Stream state updates
+            state_data = {"type": "state_update", "state": event_message}
+            last_output_state = event_message
+
+            # Handle specific data extractions
+            if "transcript" in event_message and event_message["transcript"]:
+                transcript_data = {
+                    "type": "transcript_extracted",
+                    "transcript": (
+                        event_message["transcript"][:500] + "..."
+                        if len(event_message["transcript"]) > 500
+                        else event_message["transcript"]
+                    ),
+                    "full_length": len(event_message["transcript"]),
+                }
+                yield f"data: {json.dumps(transcript_data)}\n\n"
+
+            if "comment" in event_message and event_message["comment"]:
+                comment_data = {
+                    "type": "comment_extracted",
+                    "comment": (
+                        event_message["comment"][:500] + "..."
+                        if len(event_message["comment"]) > 500
+                        else event_message["comment"]
+                    ),
+                    "full_length": len(event_message["comment"]),
+                }
+                yield f"data: {json.dumps(comment_data)}\n\n"
+
+            if "script_count" in event_message:
+                script_count_data = {
+                    "type": "script_count_calculated",
+                    "script_count": event_message["script_count"],
+                    "target_word_count": event_message.get("target_word_count", 8000),
+                }
+                yield f"data: {json.dumps(script_count_data)}\n\n"
+
+            # Handle individual script updates
+            if (
+                "script_writer_response" in event_message
+                and "current_script_index" in event_message
             ):
-                try:
-                    event_type, event_message = event
-                    logger.info(f"Event type: {event_type}")
-
-                    if event_type == "messages":
-                        message, metadata = event_message
-                        if isinstance(message, AIMessageChunk):
-                            # Stream AI message chunks
-                            node = metadata.get("node")
-                            chunk_data = {
-                                "type": "message_chunk",
-                                "content": message.content,
-                                "metadata": metadata,
-                                "node_step": node,
-                            }
-                            logger.info(f"Chunk data: {chunk_data}")
-                            yield f"data: {json.dumps(chunk_data)}\n\n"
-
-                    elif event_type == "values":
-                        # Stream state updates
-                        state_data = {"type": "state_update", "state": event_message}
-                        last_output_state = event_message
-
-                        # Handle specific data extractions
-                        if (
-                            "transcript" in event_message
-                            and event_message["transcript"]
-                        ):
-                            transcript_data = {
-                                "type": "transcript_extracted",
-                                "transcript": (
-                                    event_message["transcript"][:500] + "..."
-                                    if len(event_message["transcript"]) > 500
-                                    else event_message["transcript"]
-                                ),
-                                "full_length": len(event_message["transcript"]),
-                            }
-                            yield f"data: {json.dumps(transcript_data)}\n\n"
-
-                        if "comment" in event_message and event_message["comment"]:
-                            comment_data = {
-                                "type": "comment_extracted",
-                                "comment": (
-                                    event_message["comment"][:500] + "..."
-                                    if len(event_message["comment"]) > 500
-                                    else event_message["comment"]
-                                ),
-                                "full_length": len(event_message["comment"]),
-                            }
-                            yield f"data: {json.dumps(comment_data)}\n\n"
-
-                        if "script_count" in event_message:
-                            script_count_data = {
-                                "type": "script_count_calculated",
-                                "script_count": event_message["script_count"],
-                                "target_word_count": event_message.get(
-                                    "target_word_count", 8000
-                                ),
-                            }
-                            yield f"data: {json.dumps(script_count_data)}\n\n"
-
-                        # Handle individual script updates
-                        if (
-                            "script_writer_response" in event_message
-                            and "current_script_index" in event_message
-                        ):
-                            current_scripts = event_message["script_writer_response"]
-                            current_index = event_message["current_script_index"]
-                            script_count = event_message.get("script_count", 10)
-
-                            if current_scripts:
-                                individual_script_data = {
-                                    "type": "individual_script",
-                                    "script_index": current_index,
-                                    "script_content": (
-                                        current_scripts[-1] if current_scripts else ""
-                                    ),
-                                    "progress": f"{current_index}/{script_count}",
-                                    "scripts": current_scripts,
-                                }
-                                yield f"data: {json.dumps(individual_script_data)}\n\n"
-
-                        yield f"data: {json.dumps(state_data, default=str)}\n\n"
-
-                except Exception as e:
-                    logger.error(f"Error processing event: {e}")
-                    error_data = {"type": "error", "message": str(e)}
-                    yield f"data: {json.dumps(error_data)}\n\n"
-
-        except Exception as e:
-            logger.error(f"Error in streaming: {e}")
-            error_data = {"type": "error", "message": str(e)}
-            yield f"data: {json.dumps(error_data)}\n\n"
-
-        # Send final result
-        if last_output_state:
-            final_data = {
-                "type": "final_result",
-                "scripts": last_output_state.get("script_writer_response", []),
-                "total_scripts": len(
-                    last_output_state.get("script_writer_response", [])
-                ),
-            }
-            yield f"data: {json.dumps(final_data, default=str)}\n\n"
-
-    except Exception as e:
-        logger.error(f"Fatal error in message_generator: {e}")
-        yield f"data: {json.dumps({'type': 'fatal_error', 'message': str(e)})}\n\n"
+                current_scripts = event_message["script_writer_response"]
+                current_index = event_message["current_script_index"]
+                script_count = event_message.get("script_count", 10)
+
+                if current_scripts:
+                    individual_script_data = {
+                        "type": "individual_script",
+                        "script_index": current_index,
+                        "script_content": (
+                            current_scripts[-1] if current_scripts else ""
+                        ),
+                        "progress": f"{current_index}/{script_count}",
+                        "scripts": current_scripts,
+                    }
+                    yield f"data: {json.dumps(individual_script_data)}\n\n"
+
+            yield f"data: {json.dumps(state_data, default=str)}\n\n"
+
+    # except Exception as e:
+    #     logger.error(f"Error processing event: {e}")
+    #     error_data = {"type": "error", "message": str(e)}
+    #     yield f"data: {json.dumps(error_data)}\n\n"
+
+    # except Exception as e:
+    #     logger.error(f"Error in streaming: {e}")
+    #     error_data = {"type": "error", "message": str(e)}
+    #     yield f"data: {json.dumps(error_data)}\n\n"
+
+    # Send final result
+    if last_output_state:
+        final_data = {
+            "type": "final_result",
+            "scripts": last_output_state.get("script_writer_response", []),
+            "total_scripts": len(last_output_state.get("script_writer_response", [])),
+        }
+        yield f"data: {json.dumps(final_data, default=str)}\n\n"
+
+    # except Exception as e:
+    #     logger.error(f"Fatal error in message_generator: {e}")
+    #     yield f"data: {json.dumps({'type': 'fatal_error', 'message': str(e)})}\n\n"
 
 
 @router.post("/gen-script")
```
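With every `try/except` commented out, an exception anywhere in the generator now propagates and aborts the response mid-stream instead of surfacing as an `error` or `fatal_error` frame, so clients should treat a dropped connection as a failure. Each frame the generator does emit is a Server-Sent-Events-style `data: <json>\n\n` line whose `type` field distinguishes `message_chunk`, `state_update`, `transcript_extracted`, `comment_extracted`, `script_count_calculated`, `individual_script`, and `final_result` events. A client sketch for consuming that stream; the URL and request-body shape are assumptions, not part of this diff:

```python
import json

import httpx


def consume_gen_script(url: str, payload: dict) -> list[str]:
    """Stream the endpoint and collect the final scripts."""
    scripts: list[str] = []
    # timeout=None: the agent can run for minutes between frames.
    with httpx.stream("POST", url, json=payload, timeout=None) as response:
        for line in response.iter_lines():
            if not line.startswith("data: "):
                continue  # skip the blank separator between SSE frames
            event = json.loads(line[len("data: "):])
            if event["type"] == "message_chunk":
                print(event["content"], end="", flush=True)
            elif event["type"] == "individual_script":
                print(f"\n-- script {event['progress']} done --")
            elif event["type"] == "final_result":
                scripts = event["scripts"]
    return scripts


# Hypothetical URL and request body; adjust to the deployed app's schema.
# consume_gen_script("http://localhost:8000/gen-script", {"video_link": "https://..."})
```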
src/utils/__pycache__/helper.cpython-311.pyc CHANGED
Binary files a/src/utils/__pycache__/helper.cpython-311.pyc and b/src/utils/__pycache__/helper.cpython-311.pyc differ
 
src/utils/helper.py CHANGED
```diff
@@ -178,12 +178,15 @@ def extract_transcript(video_link: str):
         raise
 
 
-
 def extract_comment(video_link: str):
-    ytd_api = YoutubeCommentDownloader()
-    comments = ytd_api.get_comments_from_url(video_link)
-    comments_str = ""
-    for comment in comments:
-        comments_str += comment["text"] + " "
-    logger.info(f"Comments: {comments_str}")
-    return comments_str
+    try:
+        ytd_api = YoutubeCommentDownloader()
+        comments = ytd_api.get_comments_from_url(video_link)
+        comments_str = ""
+        for comment in comments:
+            comments_str += comment["text"] + " "
+        logger.info(f"Comments: {comments_str}")
+        return comments_str
+    except Exception as e:
+        logger.error(f"Failed to extract comments: {str(e)}")
+        return ""
```
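`extract_comment` is now fail-soft: any failure (network error, comments disabled, downloader quota) is logged and collapses to an empty string instead of propagating, and that empty string is exactly the sentinel `check_existed_comment` in `flow.py` uses to route past comment analysis. A quick behavioral sketch of that contract; the forced failure and the video URL are hypothetical, and the import path assumes the repo layout shown in this diff with `YoutubeCommentDownloader` imported at module level in `helper.py`, as the call site suggests:

```python
from unittest import mock

import src.utils.helper as helper  # assumes the repo root is on sys.path


def test_extract_comment_fails_soft():
    # Force the downloader constructor to blow up inside the new try block.
    with mock.patch.object(
        helper, "YoutubeCommentDownloader", side_effect=RuntimeError("quota exceeded")
    ):
        # The helper swallows the error and returns the empty-string
        # sentinel that check_existed_comment treats as "no comments".
        assert helper.extract_comment("https://youtu.be/example") == ""
```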