
Question

shell
I'm taking the system architect exam this May; please help me list the knowledge points I need to master.
python
import streamlit as st
import time
import dashscope
import os

st.subheader("对话记录生成xmind")

def call_llm(apiKey, messages):
    response = dashscope.Generation.call(
        api_key=apiKey,
        model="deepseek-r1",
        messages=messages,
        stream=True  # enable streaming output
    )
    try:
        first_chunk = next(response)  # fetch the first chunk to check the response status
        if first_chunk.status_code != 200:
            yield "生成失败", ""
            return
        yield first_chunk.output.choices[0].message.reasoning_content or "", \
              first_chunk.output.choices[0].message.content or ""
    except StopIteration:
        yield "生成失败", ""
        return
    
    # process the remaining streamed chunks
    for chunk in response:
        if st.session_state.stop_flag:  # stop generating once the stop flag is set to True
            yield "生成已停止", ""
            break
        reasoning_content = chunk.output.choices[0].message.reasoning_content
        answer_content = chunk.output.choices[0].message.content
        yield reasoning_content or "", answer_content or ""
apiKey = os.getenv("DASHSCOPE_API_KEY", "")  # fall back to "" so the empty-key check below works
apiKey = st.text_input("请输入API KEY", type="password", value=apiKey)

st.link_button("获取API Key", "https://bailian.console.aliyun.com/?tab=app#/api-key", type="secondary")


question = st.text_input("主题/问题?", value="帮我列出系统架构师考试需要掌握的知识点")
col1, col2 = st.columns([1, 8])

# initialize the stop flag in session state
if "stop_flag" not in st.session_state:
    st.session_state.stop_flag = True

def process_question(question):
    print("process_question", question)
    # show a spinner (with elapsed time) while the answer is generated
    with st.spinner("处理中...", show_time=True):
        output_placeholder = st.empty()
        messages = [
            {'role': 'user', 'content': question}
        ]
        time.sleep(3)
        full_answer_content = ""

        llm_stream = call_llm(apiKey, messages)
        for reasoning_chunk, answer_chunk in llm_stream:
            # overwrite the placeholder with the latest chunk; in DashScope's default
            # (non-incremental) streaming mode each chunk carries the full text so far
            current_content = f'{reasoning_chunk}{answer_chunk}'
            output_placeholder.markdown(current_content)
            full_answer_content = answer_chunk  # the last chunk holds the complete answer
        # persist the answer to disk and offer it for download
        md_filename = "output.md"
        with open(md_filename, "w", encoding="utf-8") as f:
            f.write(full_answer_content)

        st.session_state.stop_flag = True
        st.download_button(
            label="下载对话记录 (.md)",
            data=full_answer_content,
            file_name=md_filename,
            mime="text/markdown"
        )

with col1:
    if st.button("生成",type="primary"):
        if apiKey == "":
            st.error("请输入API KEY")
        if question == "":
            st.error("请输入问题")
        st.session_state.stop_flag = False

with col2:
    if st.button("停止"):
        st.session_state.stop_flag = True
        

if not st.session_state.stop_flag and apiKey and question:
    process_question(question)
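
To try the app, assuming the script above is saved as app.py (the file name is an assumption), install streamlit and dashscope with pip and start it with `streamlit run app.py`; setting the DASHSCOPE_API_KEY environment variable beforehand pre-fills the API key field.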
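
The subheader mentions generating an xmind file, while the code above only saves a markdown outline. Below is a minimal sketch of that conversion step, assuming the community xmind-sdk-python package is installed (pip install xmind) and that the model's answer uses #/## headings and - bullets; markdown_to_xmind and its parsing rules are illustrative and not part of the original code.

python
import xmind

def markdown_to_xmind(md_path: str, xmind_path: str, default_title: str = "对话记录") -> None:
    # xmind.load creates a new workbook when the target file does not exist yet
    workbook = xmind.load(xmind_path)
    sheet = workbook.getPrimarySheet()
    root = sheet.getRootTopic()
    root.setTitle(default_title)

    current_topic = None
    with open(md_path, "r", encoding="utf-8") as f:
        for raw_line in f:
            line = raw_line.strip()
            if line.startswith("# "):        # top-level heading -> title of the root topic
                root.setTitle(line[2:])
            elif line.startswith("## "):     # second-level heading -> child of the root topic
                current_topic = root.addSubTopic()
                current_topic.setTitle(line[3:])
            elif line.startswith("- ") and current_topic is not None:
                leaf = current_topic.addSubTopic()   # bullet item -> leaf under the current heading
                leaf.setTitle(line[2:])

    xmind.save(workbook, path=xmind_path)

# e.g. after the app has written output.md:
# markdown_to_xmind("output.md", "output.xmind")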