Skip to content

Commit

Permalink
Merge pull request #1362 from open-webui/dev
Browse files Browse the repository at this point in the history
0.1.116
  • Loading branch information
tjbck committed Mar 31, 2024
2 parents ac294a7 + 46a1810 commit 3b0cb79
Show file tree
Hide file tree
Showing 57 changed files with 2,099 additions and 757 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/format-backend.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,5 @@ jobs:
pip install yapf
- name: Format backend
run: bun run format:backend
- name: Check for changes after format
run: git diff --exit-code
3 changes: 3 additions & 0 deletions .github/workflows/format-build-frontend.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -18,5 +18,8 @@ jobs:
run: bun install
- name: Format frontend
run: bun run format
- name: Check for changes after format
run: git diff --exit-code
- name: Build frontend
if: always()
run: bun run build
18 changes: 18 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,24 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.1.116] - 2024-03-31

### Added

- **🔄 Enhanced UI**: Model selector now conveniently located in the navbar, enabling seamless switching between multiple models during conversations.
- **🔍 Improved Model Selector**: Pull a model directly from the selector; models now display detailed information for better understanding.
- **💬 Webhook Support**: Now compatible with Google Chat and Microsoft Teams.
- **🌐 Localization**: Korean translation (I18n) now available.
- **🌑 Dark Theme**: OLED dark theme introduced for reduced strain during prolonged usage.
- **🏷️ Tag Autocomplete**: Dropdown feature added for effortless chat tagging.

### Fixed

- **🔽 Auto-Scrolling**: Addressed OpenAI auto-scrolling issue.
- **🏷️ Tag Validation**: Implemented tag validation to prevent empty string tags.
- **🚫 Model Whitelisting**: Resolved LiteLLM model whitelisting issue.
- **✅ Spelling**: Corrected various spelling issues for improved readability.

## [0.1.115] - 2024-03-24

### Added
Expand Down
8 changes: 7 additions & 1 deletion backend/apps/audio/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,13 @@
)
from utils.misc import calculate_sha256

from config import SRC_LOG_LEVELS, CACHE_DIR, UPLOAD_DIR, WHISPER_MODEL, WHISPER_MODEL_DIR
from config import (
SRC_LOG_LEVELS,
CACHE_DIR,
UPLOAD_DIR,
WHISPER_MODEL,
WHISPER_MODEL_DIR,
)

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["AUDIO"])
Expand Down
14 changes: 12 additions & 2 deletions backend/apps/ollama/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,13 @@
from utils.utils import decode_token, get_current_user, get_admin_user


from config import SRC_LOG_LEVELS, OLLAMA_BASE_URLS, MODEL_FILTER_ENABLED, MODEL_FILTER_LIST, UPLOAD_DIR
from config import (
SRC_LOG_LEVELS,
OLLAMA_BASE_URLS,
MODEL_FILTER_ENABLED,
MODEL_FILTER_LIST,
UPLOAD_DIR,
)
from utils.misc import calculate_sha256

log = logging.getLogger(__name__)
Expand Down Expand Up @@ -770,7 +776,11 @@ async def generate_chat_completion(

r = None

log.debug("form_data.model_dump_json(exclude_none=True).encode(): {0} ".format(form_data.model_dump_json(exclude_none=True).encode()))
log.debug(
"form_data.model_dump_json(exclude_none=True).encode(): {0} ".format(
form_data.model_dump_json(exclude_none=True).encode()
)
)

def get_request():
nonlocal form_data
Expand Down
102 changes: 59 additions & 43 deletions backend/apps/rag/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
TextLoader,
PyPDFLoader,
CSVLoader,
BSHTMLLoader,
Docx2txtLoader,
UnstructuredEPubLoader,
UnstructuredWordDocumentLoader,
Expand Down Expand Up @@ -114,6 +115,7 @@ class CollectionNameForm(BaseModel):
class StoreWebForm(CollectionNameForm):
url: str


@app.get("/")
async def get_status():
return {
Expand Down Expand Up @@ -296,13 +298,18 @@ def store_web(form_data: StoreWebForm, user=Depends(get_current_user)):


def store_data_in_vector_db(data, collection_name, overwrite: bool = False) -> bool:

text_splitter = RecursiveCharacterTextSplitter(
chunk_size=app.state.CHUNK_SIZE,
chunk_overlap=app.state.CHUNK_OVERLAP,
add_start_index=True,
)
docs = text_splitter.split_documents(data)
return store_docs_in_vector_db(docs, collection_name, overwrite)

if len(docs) > 0:
return store_docs_in_vector_db(docs, collection_name, overwrite), None
else:
raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)


def store_text_in_vector_db(
Expand All @@ -318,6 +325,7 @@ def store_text_in_vector_db(


def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> bool:

texts = [doc.page_content for doc in docs]
metadatas = [doc.metadata for doc in docs]

Expand Down Expand Up @@ -402,6 +410,8 @@ def get_loader(filename: str, file_content_type: str, file_path: str):
loader = UnstructuredRSTLoader(file_path, mode="elements")
elif file_ext == "xml":
loader = UnstructuredXMLLoader(file_path)
elif file_ext in ["htm", "html"]:
loader = BSHTMLLoader(file_path, open_encoding="unicode_escape")
elif file_ext == "md":
loader = UnstructuredMarkdownLoader(file_path)
elif file_content_type == "application/epub+zip":
Expand Down Expand Up @@ -452,19 +462,21 @@ def store_doc(

loader, known_type = get_loader(file.filename, file.content_type, file_path)
data = loader.load()
result = store_data_in_vector_db(data, collection_name)

if result:
return {
"status": True,
"collection_name": collection_name,
"filename": filename,
"known_type": known_type,
}
else:

try:
result = store_data_in_vector_db(data, collection_name)

if result:
return {
"status": True,
"collection_name": collection_name,
"filename": filename,
"known_type": known_type,
}
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=ERROR_MESSAGES.DEFAULT(),
detail=e,
)
except Exception as e:
log.exception(e)
Expand Down Expand Up @@ -529,38 +541,42 @@ def scan_docs_dir(user=Depends(get_admin_user)):
)
data = loader.load()

result = store_data_in_vector_db(data, collection_name)

if result:
sanitized_filename = sanitize_filename(filename)
doc = Documents.get_doc_by_name(sanitized_filename)

if doc == None:
doc = Documents.insert_new_doc(
user.id,
DocumentForm(
**{
"name": sanitized_filename,
"title": filename,
"collection_name": collection_name,
"filename": filename,
"content": (
json.dumps(
{
"tags": list(
map(
lambda name: {"name": name},
tags,
try:
result = store_data_in_vector_db(data, collection_name)

if result:
sanitized_filename = sanitize_filename(filename)
doc = Documents.get_doc_by_name(sanitized_filename)

if doc == None:
doc = Documents.insert_new_doc(
user.id,
DocumentForm(
**{
"name": sanitized_filename,
"title": filename,
"collection_name": collection_name,
"filename": filename,
"content": (
json.dumps(
{
"tags": list(
map(
lambda name: {"name": name},
tags,
)
)
)
}
)
if len(tags)
else "{}"
),
}
),
)
}
)
if len(tags)
else "{}"
),
}
),
)
except Exception as e:
print(e)
pass

except Exception as e:
log.exception(e)
Expand Down
1 change: 1 addition & 0 deletions backend/apps/web/models/auths.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from apps.web.internal.db import DB

from config import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

Expand Down
1 change: 1 addition & 0 deletions backend/apps/web/models/documents.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import json

from config import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

Expand Down
24 changes: 12 additions & 12 deletions backend/apps/web/models/modelfiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,16 +64,17 @@ def __init__(self, db):
self.db.create_tables([Modelfile])

def insert_new_modelfile(
self, user_id: str,
form_data: ModelfileForm) -> Optional[ModelfileModel]:
self, user_id: str, form_data: ModelfileForm
) -> Optional[ModelfileModel]:
if "tagName" in form_data.modelfile:
modelfile = ModelfileModel(
**{
"user_id": user_id,
"tag_name": form_data.modelfile["tagName"],
"modelfile": json.dumps(form_data.modelfile),
"timestamp": int(time.time()),
})
}
)

try:
result = Modelfile.create(**modelfile.model_dump())
Expand All @@ -87,29 +88,28 @@ def insert_new_modelfile(
else:
return None

def get_modelfile_by_tag_name(self,
tag_name: str) -> Optional[ModelfileModel]:
def get_modelfile_by_tag_name(self, tag_name: str) -> Optional[ModelfileModel]:
try:
modelfile = Modelfile.get(Modelfile.tag_name == tag_name)
return ModelfileModel(**model_to_dict(modelfile))
except:
return None

def get_modelfiles(self,
skip: int = 0,
limit: int = 50) -> List[ModelfileResponse]:
def get_modelfiles(self, skip: int = 0, limit: int = 50) -> List[ModelfileResponse]:
return [
ModelfileResponse(
**{
**model_to_dict(modelfile),
"modelfile":
json.loads(modelfile.modelfile),
}) for modelfile in Modelfile.select()
"modelfile": json.loads(modelfile.modelfile),
}
)
for modelfile in Modelfile.select()
# .limit(limit).offset(skip)
]

def update_modelfile_by_tag_name(
self, tag_name: str, modelfile: dict) -> Optional[ModelfileModel]:
self, tag_name: str, modelfile: dict
) -> Optional[ModelfileModel]:
try:
query = Modelfile.update(
modelfile=json.dumps(modelfile),
Expand Down
15 changes: 9 additions & 6 deletions backend/apps/web/models/prompts.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,16 +52,18 @@ def __init__(self, db):
self.db = db
self.db.create_tables([Prompt])

def insert_new_prompt(self, user_id: str,
form_data: PromptForm) -> Optional[PromptModel]:
def insert_new_prompt(
self, user_id: str, form_data: PromptForm
) -> Optional[PromptModel]:
prompt = PromptModel(
**{
"user_id": user_id,
"command": form_data.command,
"title": form_data.title,
"content": form_data.content,
"timestamp": int(time.time()),
})
}
)

try:
result = Prompt.create(**prompt.model_dump())
Expand All @@ -81,13 +83,14 @@ def get_prompt_by_command(self, command: str) -> Optional[PromptModel]:

def get_prompts(self) -> List[PromptModel]:
return [
PromptModel(**model_to_dict(prompt)) for prompt in Prompt.select()
PromptModel(**model_to_dict(prompt))
for prompt in Prompt.select()
# .limit(limit).offset(skip)
]

def update_prompt_by_command(
self, command: str,
form_data: PromptForm) -> Optional[PromptModel]:
self, command: str, form_data: PromptForm
) -> Optional[PromptModel]:
try:
query = Prompt.update(
title=form_data.title,
Expand Down
1 change: 1 addition & 0 deletions backend/apps/web/models/tags.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
from apps.web.internal.db import DB

from config import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

Expand Down
1 change: 1 addition & 0 deletions backend/apps/web/routers/chats.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
from constants import ERROR_MESSAGES

from config import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

Expand Down
8 changes: 6 additions & 2 deletions backend/apps/web/routers/configs.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,12 @@

from apps.web.models.users import Users

from utils.utils import get_password_hash, get_current_user, get_admin_user, create_token
from utils.utils import (
get_password_hash,
get_current_user,
get_admin_user,
create_token,
)
from utils.misc import get_gravatar_url, validate_email_format
from constants import ERROR_MESSAGES

Expand Down Expand Up @@ -43,7 +48,6 @@ async def set_global_default_models(
return request.app.state.DEFAULT_MODELS



@router.post("/default/suggestions", response_model=List[PromptSuggestion])
async def set_global_default_suggestions(
request: Request,
Expand Down

0 comments on commit 3b0cb79

Please sign in to comment.