Commit 9cd9f890 authored by wanglch's avatar wanglch
Browse files

Initial commit

parents
Pipeline #2393 failed with stages
in 0 seconds
# Copyright (c) 2023-2024 DeepSeek.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from functools import wraps
import gradio as gr
def wrap_gen_fn(gen_fn):
    """Wrap a generator function so unexpected failures surface as gr.Error.

    Gradio errors raised by the wrapped generator pass through untouched;
    any other exception is converted into a user-facing gr.Error.
    """

    @wraps(gen_fn)
    def wrapped_gen_fn(prompt, *args, **kwargs):
        try:
            yield from gen_fn(prompt, *args, **kwargs)
        except gr.Error:
            # Already a user-facing Gradio error; propagate as-is.
            raise
        except Exception as e:
            raise gr.Error(f"Failed to generate text: {e}") from e

    return wrapped_gen_fn
def delete_last_conversation(chatbot, history):
    """Remove the most recent exchange from the chatbot display and history.

    Args:
        chatbot: list of (user, bot) display pairs; the last pair is dropped.
        history: flat list alternating user/bot messages; the last two
            entries are dropped (only valid when its length is even).

    Returns:
        Tuple of (chatbot, history, status_message). Both lists are mutated
        in place and returned.
    """
    if len(history) % 2 != 0:
        # Bug fix: the original constructed gr.Error(...) without raising it
        # (a silent no-op) and still reported "Delete Done". Report the
        # problem through the status message instead of pretending success.
        return (
            chatbot,
            history,
            "history length is not even",
        )

    if chatbot:
        chatbot.pop()
    if history:  # length is already known to be even here
        history.pop()
        history.pop()
    return (
        chatbot,
        history,
        "Delete Done",
    )
def reset_state():
    """Clear chatbot display, history and uploaded image; report completion."""
    empty_chatbot, empty_history = [], []
    return empty_chatbot, empty_history, None, "Reset Done"
def reset_textbox():
    """Blank out the input textbox and clear its companion status string."""
    cleared_box = gr.update(value="")
    return cleared_box, ""
def cancel_outputing():
    """Status message displayed after the user stops generation."""
    return "Stop Done"
class State:
    """Process-wide flag signalling that text generation should stop."""

    # Class-level default; interrupt()/recover() set an instance attribute.
    interrupted = False

    def interrupt(self):
        """Request that the current generation be aborted."""
        self.interrupted = True

    def recover(self):
        """Clear a previously requested interruption."""
        self.interrupted = False


# Single shared instance used across the serving modules.
shared_state = State()
# Copyright (c) 2023-2024 DeepSeek.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import annotations
import logging
from typing import List, Tuple
from deepseek_vl2.serve.app_modules.presets import gr
from deepseek_vl2.serve.app_modules.utils import convert_asis, convert_mdtext, detect_converted_mark
def compact_text_chunks(self, prompt, text_chunks: List[str]) -> List[str]:
    """Merge non-empty chunks into one numbered string, then re-split it.

    Each chunk is stripped, prefixed with a citation marker like "[1] ",
    joined with blank lines, and finally re-split by the prompt-aware
    text splitter so chunk-size budgets are respected.
    """
    logging.debug("Compacting text chunks...🚀🚀🚀")
    stripped = (chunk.strip() for chunk in text_chunks)
    numbered = [
        f"[{i + 1}] {chunk}" for i, chunk in enumerate(c for c in stripped if c)
    ]
    merged = "\n\n".join(numbered)
    # Re-split based on self.max_chunk_overlap.
    splitter = self.get_text_splitter_given_prompt(prompt, 1, padding=1)
    return splitter.split_text(merged)
def postprocess(
    self, y: List[Tuple[str | None, str | None]]
) -> List[Tuple[str | None, str | None]]:
    """
    Parameters:
        y: List of tuples representing the message and response pairs. Each message and response should be a string, which may be in Markdown format.
    Returns:
        List of tuples representing the message and response. Each message and response will be a string of HTML.
    """
    if not y:
        return []
    converted = []
    for user, bot in y:
        # Only convert strings that were not already processed by the parser.
        rendered_user = user if detect_converted_mark(user) else convert_asis(user)
        rendered_bot = bot if detect_converted_mark(bot) else convert_mdtext(bot)
        converted.append((rendered_user, rendered_bot))
    return converted
# Load the custom JS assets once at import time.
with open("deepseek_vl2/serve/assets/custom.js", "r", encoding="utf-8") as custom_file:
    customJS = custom_file.read()
with open(
    "deepseek_vl2/serve/assets/Kelpy-Codos.js", "r", encoding="utf-8"
) as kelpy_file:
    kelpyCodos = kelpy_file.read()
def reload_javascript():
    """Monkey-patch Gradio's TemplateResponse to inject the custom JS assets."""
    print("Reloading javascript...")
    js = f"<script>{customJS}</script><script>{kelpyCodos}</script>"

    def patched_template_response(*args, **kwargs):
        # Build the original response, then splice our scripts in just
        # before the closing </html> tag.
        res = GradioTemplateResponseOriginal(*args, **kwargs)
        res.body = res.body.replace(b"</html>", f"{js}</html>".encode("utf8"))
        res.init_headers()
        return res

    gr.routes.templates.TemplateResponse = patched_template_response


# Keep a handle on the unpatched class so the patched response can delegate to it.
GradioTemplateResponseOriginal = gr.routes.templates.TemplateResponse
# Copyright (c) 2023-2024 DeepSeek.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# -*- coding:utf-8 -*-
import gradio as gr
# UI copy shown at the top of the demo page.
title = """<h1 align="left" style="min-width:200px; margin-top:0;">Chat with DeepSeek-VL2 </h1>"""
description_top = """Special Tokens: `<image>`, Visual Grounding: `<|ref|>{query}<|/ref|>`, Grounding Conversation: `<|grounding|>{question}`"""
description = """"""
# Gradio queue settings: one concurrent generation, up to 10 queued events.
CONCURRENT_COUNT = 1
MAX_EVENTS = 10
# Pixel bounds used when resizing user-uploaded images for display.
MAX_IMAGE_SIZE = 800
MIN_IMAGE_SIZE = 400
# RGB colors cycled through when drawing detection boxes (key = box index mod 13).
BOX2COLOR = {
    0: (255, 0, 0),
    1: (0, 255, 0),
    2: (0, 0, 255),
    3: (0, 255, 255),
    4: (255, 255, 0),
    5: (255, 0, 255),
    6: (127, 127, 127),
    7: (255, 255, 127),
    8: (255, 127, 255),
    9: (127, 255, 255),
    10: (127, 127, 255),
    11: (127, 255, 127),
    12: (255, 127, 127),
}
# Sentinel appended to HTML so already-rendered text is not converted twice.
ALREADY_CONVERTED_MARK = "<!-- ALREADY CONVERTED BY PARSER. -->"
# Custom green-accented Gradio theme used by the demo UI.
small_and_beautiful_theme = gr.themes.Soft(
    # Primary (brand) green ramp, light -> dark.
    primary_hue=gr.themes.Color(
        c50="#EBFAF2",
        c100="#CFF3E1",
        c200="#A8EAC8",
        c300="#77DEA9",
        c400="#3FD086",
        c500="#02C160",
        c600="#06AE56",
        c700="#05974E",
        c800="#057F45",
        c900="#04673D",
        c950="#2E5541",
        name="small_and_beautiful",
    ),
    # Secondary hue: a single link-blue deliberately used for every shade.
    secondary_hue=gr.themes.Color(
        c50="#576b95",
        c100="#576b95",
        c200="#576b95",
        c300="#576b95",
        c400="#576b95",
        c500="#576b95",
        c600="#576b95",
        c700="#576b95",
        c800="#576b95",
        c900="#576b95",
        c950="#576b95",
    ),
    # Gray ramp; a few stops overridden from Tailwind defaults (see the
    # commented-out originals kept for reference).
    neutral_hue=gr.themes.Color(
        name="gray",
        c50="#f6f7f8",
        # c100="#f3f4f6",
        c100="#F2F2F2",
        c200="#e5e7eb",
        c300="#d1d5db",
        c400="#B2B2B2",
        c500="#808080",
        c600="#636363",
        c700="#515151",
        c800="#393939",
        # c900="#272727",
        c900="#2B2B2B",
        c950="#171717",
    ),
    radius_size=gr.themes.sizes.radius_sm,
).set(
    # Per-component overrides; commented-out lines are intentionally disabled.
    # button_primary_background_fill="*primary_500",
    button_primary_background_fill_dark="*primary_600",
    # button_primary_background_fill_hover="*primary_400",
    # button_primary_border_color="*primary_500",
    button_primary_border_color_dark="*primary_600",
    button_primary_text_color="white",
    button_primary_text_color_dark="white",
    button_secondary_background_fill="*neutral_100",
    button_secondary_background_fill_hover="*neutral_50",
    button_secondary_background_fill_dark="*neutral_900",
    button_secondary_text_color="*neutral_800",
    button_secondary_text_color_dark="white",
    # background_fill_primary="#F7F7F7",
    # background_fill_primary_dark="#1F1F1F",
    # block_title_text_color="*primary_500",
    block_title_background_fill_dark="*primary_900",
    block_label_background_fill_dark="*primary_900",
    input_background_fill="#F6F6F6",
    # chatbot_code_background_color_dark="*neutral_950",
)
# Copyright (c) 2023-2024 DeepSeek.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# -*- coding:utf-8 -*-
from __future__ import annotations
import html
import logging
import io
import os
import re
import base64
import time
from PIL import Image, ImageDraw, ImageFont
import mdtex2html
from markdown import markdown
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import ClassNotFound, get_lexer_by_name, guess_lexer
from deepseek_vl2.serve.app_modules.presets import (
ALREADY_CONVERTED_MARK,
BOX2COLOR,
MAX_IMAGE_SIZE,
MIN_IMAGE_SIZE
)
# Module-level logger; re-configured with handlers by configure_logger().
logger = logging.getLogger("gradio_logger")


def configure_logger():
    """Set up the shared "gradio_logger" with console and file handlers.

    Logs go to deepseek_vl2/serve/logs/<timestamp>_gradio_log.log; both
    handlers emit at INFO while the logger itself allows DEBUG records.
    """
    log = logging.getLogger("gradio_logger")
    log.setLevel(logging.DEBUG)

    os.makedirs("deepseek_vl2/serve/logs", exist_ok=True)
    stamp = time.strftime("%Y%m%d-%H%M%S")
    fmt = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # Console first, then file — same order as before.
    for handler in (
        logging.StreamHandler(),
        logging.FileHandler(f"deepseek_vl2/serve/logs/{stamp}_gradio_log.log"),
    ):
        handler.setFormatter(fmt)
        handler.setLevel(logging.INFO)
        log.addHandler(handler)

    return log
def strip_stop_words(x, stop_words):
    """Truncate x at the first stop word found (in stop_words order), then strip."""
    for stop in stop_words:
        idx = x.find(stop)
        if idx != -1:
            return x[:idx].strip()
    return x.strip()
def format_output(history, text, x):
    """Append the (text, x) exchange and return (display copy, raw history).

    The display copy has bot messages rendered through convert_to_markdown;
    the raw history is kept unrendered for the model.
    """
    new_history = history + [[text, x]]
    rendered = [[user, convert_to_markdown(bot)] for user, bot in new_history]
    return rendered, new_history
def markdown_to_html_with_syntax_highlight(md_str):  # deprecated
    """Render markdown to HTML, highlighting fenced code blocks with Pygments."""

    def replacer(match):
        lang = match.group(1) or "text"
        code = match.group(2)
        try:
            lexer = get_lexer_by_name(lang, stripall=True)
        except ValueError:
            # Unknown language tag: fall back to plain-text highlighting.
            lexer = get_lexer_by_name("text", stripall=True)
        highlighted = highlight(code, lexer, HtmlFormatter())
        return f'<pre><code class="{lang}">{highlighted}</code></pre>'

    fenced_block = r"```(\w+)?\n([\s\S]+?)\n```"
    return markdown(re.sub(fenced_block, replacer, md_str, flags=re.MULTILINE))
def normalize_markdown(md_text: str) -> str:  # deprecated
    """Normalize list spacing: blank line before a list, none inside it.

    A blank line is inserted before the first item of a list that directly
    follows non-blank text; blank lines between consecutive list items are
    dropped (a blank line is kept only where the list actually ends).
    """
    item_re = re.compile(r"^(\d+\.|-|\*|\+)\s")
    lines = md_text.split("\n")
    out = []
    in_list = False
    for idx, line in enumerate(lines):
        if item_re.match(line.strip()):
            # Separate the list from a directly preceding paragraph.
            if not in_list and idx > 0 and lines[idx - 1].strip():
                out.append("")
            in_list = True
            out.append(line)
        elif in_list and not line.strip():
            # Keep the blank line only when it terminates the list.
            if idx < len(lines) - 1 and not item_re.match(lines[idx + 1].strip()):
                out.append(line)
        else:
            in_list = False
            out.append(line)
    return "\n".join(out)
def convert_mdtext(md_text):
    """Convert markdown text to HTML, treating fenced code blocks separately.

    Non-code segments go through mdtex2html (or plain markdown when they
    contain inline code); code segments get syntax highlighting. The result
    is tagged with ALREADY_CONVERTED_MARK so it is not converted twice.
    """
    code_block_pattern = re.compile(r"```(.*?)(?:```|$)", re.DOTALL)
    inline_code_pattern = re.compile(r"`(.*?)`", re.DOTALL)

    code_blocks = code_block_pattern.findall(md_text)
    non_code_parts = code_block_pattern.split(md_text)[::2]

    pieces = []
    for non_code, code in zip(non_code_parts, code_blocks + [""]):
        if non_code.strip():
            non_code = normalize_markdown(non_code)
            if inline_code_pattern.search(non_code):
                # Inline code present: plain markdown handles backticks better.
                pieces.append(markdown(non_code, extensions=["tables"]))
            else:
                pieces.append(mdtex2html.convert(non_code, extensions=["tables"]))
        if code.strip():
            # Re-fence the code so the highlighter sees a complete block.
            code = f"\n```{code}\n\n```"
            pieces.append(markdown_to_html_with_syntax_highlight(code))
    return "".join(pieces) + ALREADY_CONVERTED_MARK
def convert_asis(userinput):
    """HTML-escape user input, preserve whitespace, and mark it as converted."""
    escaped = html.escape(userinput)
    return f'<p style="white-space:pre-wrap;">{escaped}</p>{ALREADY_CONVERTED_MARK}'
def is_stop_word_or_prefix(s: str, stop_words: list) -> bool:
    """Return True if s ends with any of the given stop words.

    NOTE(review): despite the name, only suffix matches are checked —
    confirm with callers whether prefix handling was ever intended.
    """
    for stop_word in stop_words:
        if s.endswith(stop_word):
            return True
    return False
def detect_converted_mark(userinput):
    """Return True if the text already carries the converted-by-parser mark."""
    return userinput.endswith(ALREADY_CONVERTED_MARK)
def detect_language(code):
    """Split a fenced code body into (language tag, remaining code).

    The first line is taken as the language tag unless the block starts
    with a newline, in which case no language is assumed.
    """
    if code.startswith("\n"):
        return "", code
    first_line = code.strip().split("\n", 1)[0]
    if not first_line:
        return "", code
    return first_line.lower(), code[len(first_line):].lstrip()
def convert_to_markdown(text):
    """Escape text for safe markdown display in the chatbot.

    Dollar signs and leading whitespace are HTML-escaped and a leading '#'
    is backslash-escaped — except inside ``` fenced code blocks, which are
    passed through untouched.
    """
    text = text.replace("$", "&#36;").replace("\r\n", "\n")

    def escape_leading_whitespace(line):
        # Convert the run of leading tabs/spaces into HTML entities.
        prefix = []
        for ch in line:
            if ch == "\t":
                prefix.append("&#9;")
            elif ch == " ":
                prefix.append("&nbsp;")
            else:
                break
        return "".join(prefix) + line[len(prefix):]

    out = []
    in_code_block = False
    for line in text.split("\n"):
        if line.startswith("```"):
            # Fence line: toggle code-block state, emit unchanged.
            in_code_block = not in_code_block
            out.append(f"{line}\n")
        elif in_code_block:
            out.append(f"{line}\n")
        else:
            line = escape_leading_whitespace(line)
            line = re.sub(r"^(#)", r"\\\1", line)
            out.append(f"{line} \n")
    return "".join(out)
def add_language_tag(text):
    """Guess and insert a language tag on fenced code blocks lacking one."""

    def guess_language(code_block):
        # Pygments-based best-effort guess; empty string when unknown.
        try:
            return guess_lexer(code_block).name.lower()
        except ClassNotFound:
            return ""

    code_block_pattern = re.compile(r"(```)(\w*\n[^`]+```)", re.MULTILINE)

    def replacement(match):
        code_block = match.group(2)
        if not code_block.startswith("\n"):
            # Already tagged: keep as-is (original appended a closing fence).
            return match.group(1) + code_block + "```"
        language = guess_language(code_block)
        if language:
            return f"```{language}{code_block}```"
        return f"```\n{code_block}```"

    return code_block_pattern.sub(replacement, text)
def is_variable_assigned(var_name: str) -> bool:
    """Return True if var_name is a local variable in the caller's frame.

    Bug fix: the original checked ``var_name in locals()``, which inspected
    THIS function's own locals (always just ``var_name``), so it returned
    False for every real variable. We now inspect the calling frame.
    """
    import inspect  # local import: only needed here

    caller = inspect.currentframe().f_back
    try:
        return var_name in caller.f_locals
    finally:
        # Break the reference cycle that holding a frame object creates.
        del caller
def pil_to_base64(
    image: Image.Image,
    alt: str = "user upload image",
    resize: bool = True,
    max_size: int = MAX_IMAGE_SIZE,
    min_size: int = MIN_IMAGE_SIZE,
    format: str = "JPEG",
    quality: int = 95
) -> str:
    """Encode a PIL image as an inline base64 ``<img>`` HTML tag.

    Args:
        image: source image.
        alt: alt text for the generated tag.
        resize: when True, scale the image (preserving aspect ratio) so its
            short edge is at most min(max_size / aspect_ratio, min_size).
        max_size / min_size: pixel bounds used by the resize computation.
        format: encoder name passed to PIL (e.g. "JPEG", "PNG").
        quality: encoder quality setting.

    Returns:
        An ``<img src="data:image/...;base64,...">`` HTML string.
    """
    if resize:
        max_hw, min_hw = max(image.size), min(image.size)
        aspect_ratio = max_hw / min_hw
        shortest_edge = int(min(max_size / aspect_ratio, min_size, min_hw))
        longest_edge = int(shortest_edge * aspect_ratio)
        W, H = image.size
        if H > W:
            H, W = longest_edge, shortest_edge
        else:
            H, W = shortest_edge, longest_edge
        image = image.resize((W, H))

    buffered = io.BytesIO()
    image.save(buffered, format=format, quality=quality)
    img_b64_str = base64.b64encode(buffered.getvalue()).decode()
    # Bug fix: the MIME subtype now follows the actual encoder instead of
    # being hard-coded to "png" while the default format was JPEG.
    mime = format.lower()
    return f'<img src="data:image/{mime};base64,{img_b64_str}" alt="{alt}" />'
def parse_ref_bbox(response, image: Image.Image):
    # Draw the <|ref|>/<|det|> grounding boxes found in `response` onto a copy
    # of `image`. Returns the annotated image, or None when there are no boxes
    # or when anything fails (errors are swallowed deliberately).
    try:
        image = image.copy()
        # NOTE(review): PIL's Image.size is (width, height), so `image_h`
        # actually holds the width and `image_w` the height. The scaling
        # below also uses them swapped, so the two mix-ups appear to cancel
        # out — confirm before "fixing" either side in isolation.
        image_h, image_w = image.size
        draw = ImageDraw.Draw(image)
        # Paired label/box spans emitted by the grounding model.
        ref = re.findall(r'<\|ref\|>.*?<\|/ref\|>', response)
        bbox = re.findall(r'<\|det\|>.*?<\|/det\|>', response)
        assert len(ref) == len(bbox)
        if len(ref) == 0:
            return None
        boxes, labels = [], []
        for box, label in zip(bbox, ref):
            box = box.replace('<|det|>', '').replace('<|/det|>', '')
            label = label.replace('<|ref|>', '').replace('<|/ref|>', '')
            # Strip the outer brackets; one <|det|> span may hold several boxes.
            box = box[1:-1]
            for onebox in re.findall(r'\[.*?\]', box):
                # NOTE(review): eval() on model output — the regex limits it to
                # bracketed text, but ast.literal_eval would be safer.
                boxes.append(eval(onebox))
                labels.append(label)
        for indice, (box, label) in enumerate(zip(boxes, labels)):
            # Box coordinates come on a fixed 0-999 grid; rescale to pixels.
            box = (
                int(box[0] / 999 * image_h),
                int(box[1] / 999 * image_w),
                int(box[2] / 999 * image_h),
                int(box[3] / 999 * image_w),
            )
            # Cycle through the preset palette per box index.
            box_color = BOX2COLOR[indice % len(BOX2COLOR.keys())]
            box_width = 3
            draw.rectangle(box, outline=box_color, width=box_width)
            text_x = box[0]
            text_y = box[1] - 20  # place the label just above the box
            text_color = box_color
            font = ImageFont.truetype("deepseek_vl2/serve/assets/simsun.ttc", size=20)
            draw.text((text_x, text_y), label, font=font, fill=text_color)
        # print(f"boxes = {boxes}, labels = {labels}, re-render = {image}")
        return image
    except:
        # Bare except: any parsing/drawing failure silently yields None.
        return None
def display_example(image_list):
    """Render a list of image paths as one row of inline base64 thumbnails."""
    thumbnails = []
    for img_path in image_list:
        image = Image.open(img_path)
        buffered = io.BytesIO()
        image.save(buffered, format="PNG", quality=100)
        encoded = base64.b64encode(buffered.getvalue()).decode()
        thumbnails.append(
            f'<img src="data:image/png;base64,{encoded}" alt="{img_path}" style="height:80px; margin-right: 10px;" />'
        )
    images_html = "".join(thumbnails)
    return f"""
<div style="display: flex; align-items: center; margin-bottom: 10px;">
<div style="flex: 1; margin-right: 10px;">{images_html}</div>
</div>
"""
/**
* Copyright (c) 2023-2024 DeepSeek.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
// ==UserScript==
// @name Kelpy Codos
// @namespace https://github.com/Keldos-Li/Kelpy-Codos
// @version 1.0.5
// @author Keldos; https://keldos.me/
// @description Add copy button to PRE tags before CODE tag, for Chuanhu ChatGPT especially.
// Based on Chuanhu ChatGPT version: ac04408 (2023-3-22)
// @license GPL-3.0
// @grant none
// ==/UserScript==
(function () {
  "use strict";

  // Attach a floating "copy" button to a <pre> element's <code> child.
  function addCopyButton(pre) {
    var code = pre.querySelector("code");
    if (!code) {
      return; // No <code> element found: nothing to attach the button to.
    }
    var firstChild = code.firstChild;
    if (!firstChild) {
      return; // The <code> element has no children: skip it.
    }
    var button = document.createElement("button");
    button.textContent = "\uD83D\uDCCE"; // Paperclip 📎 glyph used as the "copy" label.
    button.style.position = "relative";
    button.style.float = "right";
    button.style.fontSize = "1em"; // Optional: adjust button size.
    button.style.background = "none"; // Optional: remove background color.
    button.style.border = "none"; // Optional: remove border.
    button.style.cursor = "pointer"; // Optional: pointer cursor on hover.
    button.addEventListener("click", function () {
      var range = document.createRange();
      range.selectNodeContents(code);
      range.setStartBefore(firstChild); // Start the range before the first child (skips the button itself).
      var selection = window.getSelection();
      selection.removeAllRanges();
      selection.addRange(range);
      try {
        var success = document.execCommand("copy");
        if (success) {
          button.textContent = "\u2714";
          setTimeout(function () {
            button.textContent = "\uD83D\uDCCE"; // Restore the "copy" label.
          }, 2000);
        } else {
          button.textContent = "\u2716";
        }
      } catch (e) {
        console.error(e);
        button.textContent = "\u2716";
      }
      selection.removeAllRanges();
    });
    code.insertBefore(button, firstChild); // Insert the button before the first child element.
  }

  // MutationObserver callback: add a copy button to newly inserted <pre> nodes.
  function handleNewElements(mutationsList, observer) {
    for (var mutation of mutationsList) {
      if (mutation.type === "childList") {
        for (var node of mutation.addedNodes) {
          if (node.nodeName === "PRE") {
            addCopyButton(node);
          }
        }
      }
    }
  }

  var observer = new MutationObserver(handleNewElements);
  observer.observe(document.documentElement, {
    childList: true,
    subtree: true,
  });

  // Handle the <pre> blocks that already exist at load time.
  document.querySelectorAll("pre").forEach(addCopyButton);
})();
File suppressed by a .gitattributes entry or the file's encoding is unsupported.
/**
* Copyright (c) 2023-2024 DeepSeek.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/* Theme variables for the chatbot background in light/dark mode. */
:root {
  --chatbot-color-light: #f3f3f3;
  --chatbot-color-dark: #121111;
}

/* status_display */
#status_display {
  display: flex;
  min-height: 2.5em;
  align-items: flex-end;
  justify-content: flex-end;
}
#status_display p {
  font-size: 0.85em;
  font-family: monospace;
  color: var(--body-text-color-subdued);
}

/* usage_display */
#usage_display {
  height: 1em;
}
#usage_display p {
  padding: 0 1em;
  font-size: 0.85em;
  font-family: monospace;
  color: var(--body-text-color-subdued);
}

/* list */
ol:not(.options),
ul:not(.options) {
  padding-inline-start: 2em !important;
}

/* Thank @Keldos-Li for fixing it */
/* Light mode (default) */
#deepseek_chatbot {
  background-color: var(--chatbot-color-light) !important;
  color: #000000 !important;
}
[data-testid="bot"] {
  background-color: #ffffff !important;
}
[data-testid="user"] {
  background-color: #95ec69 !important;
}

/* Dark mode */
.dark #deepseek_chatbot {
  background-color: var(--chatbot-color-dark) !important;
  color: #ffffff !important;
}
.dark [data-testid="bot"] {
  background-color: #2c2c2c !important;
}
.dark [data-testid="user"] {
  background-color: #26b561 !important;
}

/* Chatbot container fills the column and scrolls when content overflows. */
#deepseek_chatbot {
  height: 100%;
  min-height: 800px;
  flex-grow: 1;
  overflow: auto;
}

/* Message bubbles (both roles). */
[class*="message"] {
  border-radius: var(--radius-xl) !important;
  border: none;
  padding: var(--spacing-xl) !important;
  font-size: var(--text-md) !important;
  line-height: var(--line-md) !important;
  min-height: calc(var(--text-md) * var(--line-md) + 2 * var(--spacing-xl));
  min-width: calc(var(--text-md) * var(--line-md) + 2 * var(--spacing-xl));
}
[data-testid="bot"] {
  max-width: 85%;
  border-bottom-left-radius: 0 !important;
}
[data-testid="user"] {
  max-width: 85%;
  width: auto !important;
  border-bottom-right-radius: 0 !important;
}

/* Table */
table {
  margin: 1em 0;
  border-collapse: collapse;
  empty-cells: show;
}
td,
th {
  border: 1.2px solid var(--border-color-primary) !important;
  padding: 0.2em;
}
thead {
  background-color: rgba(175, 184, 193, 0.2);
}
thead th {
  padding: 0.5em 0.2em;
}

/* Inline code */
#deepseek_chatbot code {
  display: inline;
  white-space: break-spaces;
  border-radius: 6px;
  margin: 0 2px 0 2px;
  padding: 0.2em 0.4em 0.1em 0.4em;
  background-color: rgba(175, 184, 193, 0.2);
}

/* Code block */
#deepseek_chatbot pre code {
  display: block;
  overflow: auto;
  white-space: pre;
  background-color: #1c1d1e !important;
  border-radius: 10px;
  padding: 1.4em 1.2em 0em 1.4em;
  margin: 1.2em 2em 1.2em 0.5em;
  color: #fdf8f8;
  box-shadow: 6px 6px 16px hsla(0, 0%, 0%, 0.2);
}
/* Highlight — Monokai-style Pygments token colors.
   Rules with identical declarations are grouped by color; every token
   class keeps exactly the declarations it had individually. */
#deepseek_chatbot .highlight {
  background-color: transparent;
}
#deepseek_chatbot .highlight .hll {
  background-color: #49483e;
}
/* Error */
#deepseek_chatbot .highlight .err {
  color: #960050;
  background-color: #1e0010;
}
/* Comments: Comment, Hashbang, Multiline, Preproc, PreprocFile, Single,
   Special; plus Generic.Subheading */
#deepseek_chatbot .highlight .c,
#deepseek_chatbot .highlight .ch,
#deepseek_chatbot .highlight .cm,
#deepseek_chatbot .highlight .cp,
#deepseek_chatbot .highlight .cpf,
#deepseek_chatbot .highlight .c1,
#deepseek_chatbot .highlight .cs,
#deepseek_chatbot .highlight .gu {
  color: #75715e;
}
/* Keywords: Keyword, Constant, Declaration, Pseudo, Reserved, Type;
   plus Name.Constant */
#deepseek_chatbot .highlight .k,
#deepseek_chatbot .highlight .kc,
#deepseek_chatbot .highlight .kd,
#deepseek_chatbot .highlight .kp,
#deepseek_chatbot .highlight .kr,
#deepseek_chatbot .highlight .kt,
#deepseek_chatbot .highlight .no {
  color: #66d9ef;
}
/* Literals & numbers: Literal, Number (Bin/Float/Hex/Integer/Oct/Long);
   plus Literal.String.Escape */
#deepseek_chatbot .highlight .l,
#deepseek_chatbot .highlight .m,
#deepseek_chatbot .highlight .mb,
#deepseek_chatbot .highlight .mf,
#deepseek_chatbot .highlight .mh,
#deepseek_chatbot .highlight .mi,
#deepseek_chatbot .highlight .mo,
#deepseek_chatbot .highlight .se,
#deepseek_chatbot .highlight .il {
  color: #ae81ff;
}
/* Plain names, punctuation & whitespace: Name, Punctuation, Builtin,
   Entity, Label, Namespace, Property, Variable (plus Class/Global/
   Instance/Magic variants), Builtin.Pseudo, Text.Whitespace */
#deepseek_chatbot .highlight .n,
#deepseek_chatbot .highlight .p,
#deepseek_chatbot .highlight .nb,
#deepseek_chatbot .highlight .ni,
#deepseek_chatbot .highlight .nl,
#deepseek_chatbot .highlight .nn,
#deepseek_chatbot .highlight .py,
#deepseek_chatbot .highlight .nv,
#deepseek_chatbot .highlight .w,
#deepseek_chatbot .highlight .bp,
#deepseek_chatbot .highlight .vc,
#deepseek_chatbot .highlight .vg,
#deepseek_chatbot .highlight .vi,
#deepseek_chatbot .highlight .vm {
  color: #f8f8f2;
}
/* Operators & tags: Operator, Operator.Word, Keyword.Namespace,
   Name.Tag, Generic.Deleted */
#deepseek_chatbot .highlight .o,
#deepseek_chatbot .highlight .ow,
#deepseek_chatbot .highlight .kn,
#deepseek_chatbot .highlight .nt,
#deepseek_chatbot .highlight .gd {
  color: #f92672;
}
/* Strings: Literal.Date and all Literal.String variants */
#deepseek_chatbot .highlight .ld,
#deepseek_chatbot .highlight .s,
#deepseek_chatbot .highlight .sa,
#deepseek_chatbot .highlight .sb,
#deepseek_chatbot .highlight .sc,
#deepseek_chatbot .highlight .dl,
#deepseek_chatbot .highlight .sd,
#deepseek_chatbot .highlight .s2,
#deepseek_chatbot .highlight .sh,
#deepseek_chatbot .highlight .si,
#deepseek_chatbot .highlight .sx,
#deepseek_chatbot .highlight .sr,
#deepseek_chatbot .highlight .s1,
#deepseek_chatbot .highlight .ss {
  color: #e6db74;
}
/* Functions & classes: Generic.Inserted, Name.Attribute/Class/Decorator/
   Exception/Function/Other, Name.Function.Magic */
#deepseek_chatbot .highlight .gi,
#deepseek_chatbot .highlight .na,
#deepseek_chatbot .highlight .nc,
#deepseek_chatbot .highlight .nd,
#deepseek_chatbot .highlight .ne,
#deepseek_chatbot .highlight .nf,
#deepseek_chatbot .highlight .nx,
#deepseek_chatbot .highlight .fm {
  color: #a6e22e;
}
/* Generic.Emph */
#deepseek_chatbot .highlight .ge {
  font-style: italic;
}
/* Generic.Strong */
#deepseek_chatbot .highlight .gs {
  font-weight: bold;
}
/**
* Copyright (c) 2023-2024 DeepSeek.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
// custom javascript here
File suppressed by a .gitattributes entry or the file's encoding is unsupported.
# Copyright (c) 2023-2024 DeepSeek.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from threading import Thread
from typing import List
import torch
import transformers
from transformers import (
AutoModelForCausalLM,
StoppingCriteria,
StoppingCriteriaList,
TextIteratorStreamer,
)
from deepseek_vl2.models import DeepseekVLV2Processor, DeepseekVLV2ForCausalLM
from deepseek_vl2.models.conversation import Conversation
def load_model(model_path, dtype=torch.bfloat16):
    """Load the DeepSeek-VL2 processor, tokenizer and model from *model_path*.

    Returns:
        (tokenizer, model, processor) with the model cast to *dtype*,
        moved to the GPU, and switched to eval mode.
    """
    processor = DeepseekVLV2Processor.from_pretrained(model_path)
    model: DeepseekVLV2ForCausalLM = AutoModelForCausalLM.from_pretrained(
        model_path, trust_remote_code=True, torch_dtype=dtype
    )
    model = model.cuda().eval()
    return processor.tokenizer, model, processor
def convert_conversation_to_prompts(conversation: "Conversation"):
    """Flatten a conversation's (user, assistant) message pairs into prompt dicts.

    Each even-indexed message is a user turn whose content is either plain
    text or a ``(text, images)`` tuple; each odd-indexed message is the
    assistant reply.

    Returns:
        (conv_prompts, last_image) — the list of role/content dicts and the
        most recent image attached to any user turn (None if no images).
    """
    conv_prompts = []
    last_image = None

    msgs = conversation.messages
    for i in range(0, len(msgs), 2):
        role, content = msgs[i][0], msgs[i][1]
        if isinstance(content, tuple):
            text, images = content
            # Track the newest image so callers can display/reuse it.
            last_image = images[-1]
        else:
            text, images = content, []
        conv_prompts.append({"role": role, "content": text, "images": images})
        conv_prompts.append({"role": msgs[i + 1][0], "content": msgs[i + 1][1]})

    return conv_prompts, last_image
class StoppingCriteriaSub(StoppingCriteria):
    """Stop generation when any configured stop-word token sequence appears
    at the end of the generated ids.

    Args:
        stops: iterable of 1-D token-id tensors; generation halts once the
            tail of the output matches any of them.
        encounters: accepted for interface compatibility; unused here.
    """

    def __init__(self, stops=None, encounters=1):
        super().__init__()
        # `stops=None` instead of a mutable `[]` default (a shared list
        # default would be aliased across instances). Move each stop
        # sequence to the GPU once so the per-step comparison below is
        # device-local.  NOTE(review): "cuda" is hard-coded — assumes the
        # model runs on the default CUDA device; confirm for multi-GPU use.
        self.stops = [stop.to("cuda") for stop in (stops if stops is not None else [])]

    def __call__(
        self, input_ids: torch.LongTensor, scores: torch.FloatTensor, **kwargs
    ):
        # Returning True aborts generation. Only compare once enough tokens
        # have been produced to cover the stop sequence.
        for stop in self.stops:
            if input_ids.shape[-1] < len(stop):
                continue
            if torch.all(stop == input_ids[0][-len(stop):]).item():
                return True
        return False
@torch.inference_mode()
def deepseek_generate(
    conversations: list,
    vl_gpt: torch.nn.Module,
    vl_chat_processor: DeepseekVLV2Processor,
    tokenizer: transformers.PreTrainedTokenizer,
    stop_words: list,
    max_length: int = 256,
    temperature: float = 1.0,
    top_p: float = 1.0,
    repetition_penalty: float = 1.1,
    chunk_size: int = -1
):
    """Batch *conversations* through the chat processor and stream the reply.

    Gathers every image referenced by the conversation messages, builds the
    model inputs on the model's device, and returns the generator produced
    by :func:`generate`.
    """
    # Collect images from every message that carries an "images" entry.
    pil_images = [
        image
        for message in conversations
        if "images" in message
        for image in message["images"]
    ]

    prepare_inputs = vl_chat_processor(
        conversations=conversations,
        images=pil_images,
        inference_mode=True,
        force_batchify=True,
        system_prompt=""
    ).to(vl_gpt.device)

    return generate(
        vl_gpt,
        tokenizer,
        prepare_inputs,
        max_gen_len=max_length,
        temperature=temperature,
        repetition_penalty=repetition_penalty,
        top_p=top_p,
        stop_words=stop_words,
        chunk_size=chunk_size
    )
@torch.inference_mode()
def generate(
    vl_gpt,
    tokenizer,
    prepare_inputs,
    max_gen_len: int = 256,
    temperature: float = 0,
    repetition_penalty=1.1,
    top_p: float = 0.95,
    stop_words: List[str] = (),
    chunk_size: int = -1
):
    """Stream the text output from the multimodality model with prompt and image inputs.

    Args:
        vl_gpt: the DeepSeek-VL2 causal LM.
        tokenizer: tokenizer used both for encoding stop words and decoding
            the streamed tokens.
        prepare_inputs: batched processor output (input_ids, images, masks).
        max_gen_len: maximum number of new tokens to generate.
        temperature: 0 (or below) selects greedy decoding; positive values
            enable sampling with *top_p* and *repetition_penalty*.
        stop_words: strings whose token sequences abort generation.
        chunk_size: when != -1, prefill the prompt incrementally in chunks of
            this size to bound peak memory.

    Yields:
        Decoded text fragments as the model produces them.
    """
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True)

    # `stop_words` defaults to an immutable () rather than a mutable [] —
    # a shared list default would be aliased across calls.
    stop_words_ids = [
        torch.tensor(tokenizer.encode(stop_word)) for stop_word in stop_words
    ]
    stopping_criteria = StoppingCriteriaList(
        [StoppingCriteriaSub(stops=stop_words_ids)]
    )

    if chunk_size != -1:
        # Incremental prefilling trades latency for a smaller memory peak.
        inputs_embeds, past_key_values = vl_gpt.incremental_prefilling(
            input_ids=prepare_inputs.input_ids,
            images=prepare_inputs.images,
            images_seq_mask=prepare_inputs.images_seq_mask,
            images_spatial_crop=prepare_inputs.images_spatial_crop,
            attention_mask=prepare_inputs.attention_mask,
            chunk_size=chunk_size
        )
    else:
        inputs_embeds = vl_gpt.prepare_inputs_embeds(**prepare_inputs)
        past_key_values = None

    generation_config = dict(
        inputs_embeds=inputs_embeds,
        input_ids=prepare_inputs.input_ids,
        images=prepare_inputs.images,
        images_seq_mask=prepare_inputs.images_seq_mask,
        images_spatial_crop=prepare_inputs.images_spatial_crop,
        attention_mask=prepare_inputs.attention_mask,
        past_key_values=past_key_values,
        pad_token_id=tokenizer.eos_token_id,
        bos_token_id=tokenizer.bos_token_id,
        eos_token_id=tokenizer.eos_token_id,
        max_new_tokens=max_gen_len,
        use_cache=True,
        streamer=streamer,
        stopping_criteria=stopping_criteria,
    )
    # do_sample is decided here exclusively (the previous code also set it
    # unconditionally above, which both branches then overwrote).
    if temperature > 0:
        generation_config.update(
            {
                "do_sample": True,
                "top_p": top_p,
                "temperature": temperature,
                "repetition_penalty": repetition_penalty,
            }
        )
    else:
        generation_config["do_sample"] = False

    # Run generation on a daemon worker thread so this generator can stream
    # tokens as they are produced; daemon=True prevents an abandoned
    # generator's thread from blocking interpreter exit.
    thread = Thread(target=vl_gpt.generate, kwargs=generation_config, daemon=True)
    thread.start()
    yield from streamer
# Copyright (c) 2023-2024 DeepSeek.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Copyright (c) 2023-2024 DeepSeek.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import json
from typing import Dict, List
import PIL.Image
import torch
from transformers import AutoModelForCausalLM
def load_pretrained_model(model_path: str):
    """Load the tokenizer, chat processor, and model from *model_path*.

    Returns:
        (tokenizer, processor, model) with the model cast to bfloat16,
        moved to the GPU, and switched to eval mode.
    """
    # Imported locally to keep module import light and avoid cycles.
    from deepseek_vl2.models.processing_deepseek_vl_v2 import DeepseekVLV2Processor
    from deepseek_vl2.models.modeling_deepseek_vl_v2 import DeepseekVLV2ForCausalLM

    processor = DeepseekVLV2Processor.from_pretrained(model_path)
    model: DeepseekVLV2ForCausalLM = AutoModelForCausalLM.from_pretrained(
        model_path, trust_remote_code=True
    )
    model = model.to(torch.bfloat16).cuda().eval()
    return processor.tokenizer, processor, model
def load_pil_images(conversations: List[Dict[str, str]]) -> "List[PIL.Image.Image]":
    """Open every image referenced by *conversations* as an RGB PIL image.

    Args:
        conversations: the conversations with a list of messages. An example is:
            [
                {
                    "role": "User",
                    "content": "<image>\nExtract all information from this image and convert them into markdown format.",
                    "images": ["./examples/table_datasets.png"]
                },
                {"role": "Assistant", "content": ""},
            ]

    Returns:
        The list of PIL images, in message order; messages without an
        "images" key contribute nothing.
    """
    return [
        PIL.Image.open(image_path).convert("RGB")
        for message in conversations
        for image_path in message.get("images", [])
    ]
def load_json(filepath):
    """Read *filepath* as UTF-8 JSON and return the parsed object.

    Args:
        filepath: path to a JSON file.

    Returns:
        The deserialized Python object (dict, list, etc.).
    """
    # Explicit encoding: JSON files are UTF-8 by spec, while the platform
    # default text encoding may differ (e.g. cp1252 on Windows).
    with open(filepath, "r", encoding="utf-8") as f:
        return json.load(f)
File suppressed by a .gitattributes entry or the file's encoding is unsupported.
File suppressed by a .gitattributes entry or the file's encoding is unsupported.
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="151" height="20" role="img" aria-label="DeepSeek: Homepage"><title>DeepSeek: Homepage</title><linearGradient id="s" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><clipPath id="r"><rect width="151" height="20" rx="3" fill="#fff"/></clipPath><g clip-path="url(#r)"><rect width="82" height="20" fill="#555"/><rect x="82" width="69" height="20" fill="#536af5"/><rect width="151" height="20" fill="url(#s)"/></g><g fill="#fff" text-anchor="middle" font-family="Verdana,Geneva,DejaVu Sans,sans-serif" text-rendering="geometricPrecision" font-size="110"><image x="5" y="3" width="14" height="14" xlink:href="data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iNjMuMTE5NjI5IiBoZWlnaHQ9IjQ2LjQwMzMyMCIgdmlld0JveD0iMCAwIDYzLjExOTYgNDYuNDAzMyIgZmlsbD0ibm9uZSIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIiB4bWxuczp4bGluaz0iaHR0cDovL3d3dy53My5vcmcvMTk5OS94bGluayI+Cgk8ZGVzYz4KCQkJQ3JlYXRlZCB3aXRoIFBpeHNvLgoJPC9kZXNjPgoJPGRlZnMvPgoJPHBhdGggaWQ9InBhdGgiIGQ9Ik02Mi40NTc1IDMuODk0NDFDNjEuNzg4OCAzLjU2NzI2IDYxLjUwMSA0LjE5MDggNjEuMTEwMSA0LjUwNzY5QzYwLjk3NjMgNC42MDk5OSA2MC44NjMgNC43NDI4IDYwLjc1IDQuODY1NDhDNTkuNzcyNyA1LjkwODIgNTguNjMxMSA2LjU5MzAyIDU3LjEzOTQgNi41MTEyM0M1NC45NTg3IDYuMzg4NTUgNTMuMDk2OSA3LjA3MzQ5IDUxLjQ1MTIgOC43Mzk3NUM1MS4xMDEzIDYuNjg1MDYgNDkuOTM5IDUuNDU4MzcgNDguMTY5OSA0LjY3MTI2QzQ3LjI0NDEgNC4yNjIzMyA0Ni4zMDgxIDMuODUzNTIgNDUuNjU5OSAyLjk2NDExQzQ1LjIwNzMgMi4zMzAzMiA0NS4wODQgMS42MjUgNDQuODU3NyAwLjkyOTkzMkM0NC43MTM2IDAuNTEwODY0IDQ0LjU2OTYgMC4wODE1NDMgNDQuMDg2MiAwLjAwOTg4NzdDNDMuNTYxNSAtMC4wNzE4OTk0IDQzLjM1NTcgMC4zNjc2NzYgNDMuMTUwMSAwLjczNTcxOEM0Mi4zMjcxIDIuMjM4NCA0Mi4wMDgzIDMuODk0NDEgNDIuMDM5MSA1LjU3MDhDNDIuMTExMSA5LjM0Mjc3IDQzLjcwNTYgMTIuMzQ4MSA0Ni44NzM4IDE0LjQ4NDZDNDcuMjMzNiAxNC43MyA0Ny4zMjY0IDE0Ljk3NTMgNDcuMjEzMSAxNS4zMzNDNDYuOTk3MSAxNi4wNjkxIDQ2Ljc0IDE2Ljc4NDcgNDYuNTEzNyAxNy41MjA2QzQ2LjM2OTYgMTcuOTkwOCA0Ni4xNTM4IDE4LjA5MyA0NS42NDk3I
DE3Ljg4ODdDNDMuOTExNCAxNy4xNjI4IDQyLjQwOTQgMTYuMDg5NSA0MS4wODI1IDE0Ljc5MTNDMzguODI5OCAxMi42MTM5IDM2Ljc5MzIgMTAuMjExNyAzNC4yNTI0IDguMzMwODFDMzMuNjU1OCA3Ljg5MTI0IDMzLjA1OTMgNy40ODI0MiAzMi40NDIxIDcuMDkzOTlDMjkuODQ5OSA0LjU3OTIyIDMyLjc4MTUgMi41MTQ0IDMzLjQ2MDQgMi4yNjkwNEMzNC4xNzAyIDIuMDEzNDMgMzMuNzA3MyAxLjEzNDQgMzEuNDEzMyAxLjE0NDY1QzI5LjExOTYgMS4xNTQ3OSAyNy4wMjEyIDEuOTIxNTEgMjQuMzQ2NyAyLjk0MzczQzIzLjk1NTggMy4wOTcwNSAyMy41NDQ0IDMuMjA5NDcgMjMuMTIyNiAzLjMwMTUxQzIwLjY5NTEgMi44NDE0MyAxOC4xNzQ4IDIuNzM5MjYgMTUuNTQxNSAzLjAzNTc3QzEwLjU4MzUgMy41ODc3NyA2LjYyMzI5IDUuOTI4NTkgMy43MTI0IDkuOTI1NTRDMC4yMTUwODggMTQuNzMgLTAuNjA3OTEgMjAuMTg4NiAwLjQwMDE0NiAyNS44ODI0QzEuNDU5NzIgMzEuODgyOCA0LjUyNDkgMzYuODUwOCA5LjIzNjA4IDQwLjczNTRDMTQuMTIyMSA0NC43NjI5IDE5Ljc0ODggNDYuNzM1NyAyNi4xNjc1IDQ2LjM1NzVDMzAuMDY1OSA0Ni4xMzI3IDM0LjQwNjcgNDUuNjExMyAzOS4zMDMgNDEuNDcxM0M0MC41Mzc0IDQyLjA4NDcgNDEuODMzNSA0Mi4zMyA0My45ODM0IDQyLjUxNEM0NS42Mzk0IDQyLjY2NzQgNDcuMjMzNiA0Mi40MzIzIDQ4LjQ2OCA0Mi4xNzY2QzUwLjQwMTkgNDEuNzY3OCA1MC4yNjgzIDM5Ljk3ODkgNDkuNTY4OCAzOS42NTE3QzQzLjkwMDkgMzcuMDE0NCA0NS4xNDU1IDM4LjA4NzggNDQuMDE0MiAzNy4yMTg5QzQ2Ljg5NDMgMzMuODE0OCA1MS4yMzUxIDMwLjI3OCA1Mi45MzI0IDE4LjgxODhDNTMuMDY2MiAxNy45MDkxIDUyLjk1MjkgMTcuMzM2NyA1Mi45MzI0IDE2LjYwMDZDNTIuOTIyMSAxNi4xNTA5IDUzLjAyNDkgMTUuOTc3MSA1My41MzkzIDE1LjkyNTlDNTQuOTU4NyAxNS43NjI1IDU2LjMzNzIgMTUuMzczOSA1Ny42MDIzIDE0LjY3ODhDNjEuMjc0NyAxMi42NzUzIDYyLjc1NTkgOS4zODM2NyA2My4xMDU1IDUuNDM3OTlDNjMuMTU3IDQuODM0ODQgNjMuMDk1MiA0LjIxMTMgNjIuNDU3NSAzLjg5NDQxWk0zMC40NTY4IDM5LjQwNjVDMjQuOTYzOSAzNS4wOTI3IDIyLjI5OTggMzMuNjcxOCAyMS4xOTkgMzMuNzMzMkMyMC4xNzA0IDMzLjc5NDQgMjAuMzU1NyAzNC45NyAyMC41ODE4IDM1LjczNjdDMjAuODE4NiAzNi40OTMgMjEuMTI3MiAzNy4wMTQ0IDIxLjU1OTEgMzcuNjc4OEMyMS44NTc0IDM4LjExODQgMjIuMDYzMiAzOC43NzI3IDIxLjI2MDcgMzkuMjYzM0MxOS40OTE1IDQwLjM1NzEgMTYuNDE2IDM4Ljg5NTMgMTYuMjcyIDM4LjgyMzdDMTIuNjkyNCAzNi43MTggOS42OTg5NyAzMy45Mzc1IDcuNTkwMzMgMzAuMTM0OUM1LjU1MzQ3IDI2LjQ3NTMgNC4zNzA2MSAyMi41NDk5IDQuMTc1MjkgMTguMzU4OUM0LjEyMzc4IDE3LjM0NjggNC40MjIxMiAxNi45ODkgNS40MzAxOCAxNi44MDUxQzYuNzU3M
DggMTYuNTU5NyA4LjEyNTI0IDE2LjUwODcgOS40NTIxNSAxNi43MDI5QzE1LjA1ODEgMTcuNTIwNiAxOS44MzExIDIwLjAyNSAyMy44MzIzIDIzLjk5MTNDMjYuMTE2IDI2LjI1MDQgMjcuODQ0IDI4Ljk0OTEgMjkuNjIzNSAzMS41ODY0QzMxLjUxNjQgMzQuMzg3MyAzMy41NTMgMzcuMDU1MyAzNi4xNDUgMzkuMjQyOUMzNy4wNjA1IDQwLjAwOTUgMzcuNzkxIDQwLjU5MjIgMzguNDkwNSA0MS4wMjE1QzM2LjM4MTYgNDEuMjU2NyAzMi44NjM4IDQxLjMwNzcgMzAuNDU2OCAzOS40MDY1Wk0zMy4wOTAxIDIyLjQ4ODZDMzMuMDkwMSAyMi4wMzg4IDMzLjQ1MDIgMjEuNjgxIDMzLjkwMjYgMjEuNjgxQzM0LjAwNTYgMjEuNjgxIDM0LjA5ODEgMjEuNzAxNSAzNC4xODA0IDIxLjczMjJDMzQuMjkzNSAyMS43NzMxIDM0LjM5NjUgMjEuODM0NCAzNC40Nzg4IDIxLjkyNjRDMzQuNjIyOCAyMi4wNjk1IDM0LjcwNTEgMjIuMjczOSAzNC43MDUxIDIyLjQ4ODZDMzQuNzA1MSAyMi45Mzg0IDM0LjM0NSAyMy4yOTYxIDMzLjg5MjMgMjMuMjk2MUMzMy40Mzk3IDIzLjI5NjEgMzMuMDkwMSAyMi45Mzg0IDMzLjA5MDEgMjIuNDg4NlpNNDEuMjY3NiAyNi42Nzk4QzQwLjc0MzIgMjYuODk0NCA0MC4yMTg1IDI3LjA3ODQgMzkuNzE0NCAyNy4wOTg5QzM4LjkzMjYgMjcuMTM5OCAzOC4wNzg5IDI2LjgyMjkgMzcuNjE2IDI2LjQzNDRDMzYuODk2IDI1LjgzMTMgMzYuMzgxNiAyNS40OTQgMzYuMTY1OCAyNC40NDFDMzYuMDczIDIzLjk5MTMgMzYuMTI0NSAyMy4yOTYxIDM2LjIwNjggMjIuODk3NUMzNi4zOTIxIDIyLjAzODggMzYuMTg2MyAyMS40ODY4IDM1LjU3OTMgMjAuOTg2QzM1LjA4NTcgMjAuNTc3IDM0LjQ1ODMgMjAuNDY0NiAzMy43NjkgMjAuNDY0NkMzMy41MTE3IDIwLjQ2NDYgMzMuMjc1MSAyMC4zNTIyIDMzLjEwMDMgMjAuMjYwMUMzMi44MTIzIDIwLjExNzEgMzIuNTc1NyAxOS43NTkzIDMyLjgwMiAxOS4zMTk3QzMyLjg3NCAxOS4xNzY2IDMzLjIyMzkgMTguODI5MSAzMy4zMDYyIDE4Ljc2NzdDMzQuMjQyMiAxOC4yMzYyIDM1LjMyMjMgMTguNDA5OSAzNi4zMjAxIDE4LjgwODZDMzcuMjQ1OCAxOS4xODY5IDM3Ljk0NTMgMTkuODgyIDM4Ljk1MzQgMjAuODYzM0MzOS45ODE5IDIyLjA0OTEgNDAuMTY3IDIyLjM3NjIgNDAuNzUzNCAyMy4yNjU1QzQxLjIxNjMgMjMuOTYwNyA0MS42Mzc5IDI0LjY3NjEgNDEuOTI2IDI1LjQ5NEM0Mi4xMDA4IDI2LjAwNTEgNDEuODc0NSAyNi40MjQyIDQxLjI2NzYgMjYuNjc5OFoiIGZpbGwtcnVsZT0ibm9uemVybyIgZmlsbD0iIzRENkJGRSIvPgo8L3N2Zz4K"/><text aria-hidden="true" x="505" y="150" fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="550">DeepSeek</text><text x="505" y="140" transform="scale(.1)" fill="#fff" textLength="550">DeepSeek</text><text aria-hidden="true" x="1155" y="150" 
fill="#010101" fill-opacity=".3" transform="scale(.1)" textLength="590">Homepage</text><text x="1155" y="140" transform="scale(.1)" fill="#fff" textLength="590">Homepage</text></g></svg>
File suppressed by a .gitattributes entry or the file's encoding is unsupported.
File suppressed by a .gitattributes entry or the file's encoding is unsupported.
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment