This repository was archived by the owner on Dec 18, 2024. It is now read-only.
Merged
2 changes: 1 addition & 1 deletion dev.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "re_edge_gpt_dev"
version = "0.0.41"
version = "0.0.42"
authors = [
{ name = "JE-Chen", email = "[email protected]" },
]
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "re_edge_gpt"
version = "0.0.34"
version = "0.0.35"
authors = [
{ name = "JE-Chen", email = "[email protected]" },
]
4 changes: 2 additions & 2 deletions re_edge_gpt/chat/chathub.py
@@ -40,7 +40,7 @@ def __init__(
conversation: Conversation,
proxy: str = None,
cookies: Union[List[dict], None] = None,
mode: str = "Bing"
mode: str = "Bing",
) -> None:
self.aio_session = None
self.request: ChatHubRequest
@@ -84,7 +84,7 @@ async def ask_stream(
# Use for attachment
attachment: dict = None,
remove_options: list = None,
add_options: list = None
add_options: list = None,
) -> Generator[bool, Union[dict, str], None]:
""" """
if self.encrypted_conversation_signature is not None:
2 changes: 1 addition & 1 deletion re_edge_gpt/chat/constants.py
@@ -104,7 +104,7 @@
"Edg/121.0.0.0",
}

BUNDLE_VERSION = "1.1573.4"
BUNDLE_VERSION = "1.1642.1"
APP_ID = "6c0f12ef-97d3-4869-bc42-c1d9bdb4a759"

SYDNEY_INIT_HEADER = HEADERS_INIT_CONVER.update(
115 changes: 72 additions & 43 deletions re_edge_gpt/chat/conversation_style.py
@@ -9,60 +9,89 @@

class ConversationStyle(Enum):
creative = [
"fluxcopilot",
"nojbf",
"nlu_direct_response_filter",
"deepleo",
"disable_emoji_spoken_text",
"responsible_ai_policy_235",
"enablemm",
"dv3sugg",
"autosave",
"iyxapbing",
"iycapbing",
"dgencontentv3",
"nointernalsugg",
"disable_telemetry",
"machine_affinity",
"streamf",
"codeint",
"langdtwb",
"fdwtlst",
"fluxprod",
"eredirecturl",
"deuct3"
"h3imaginative",
"clgalileo",
"gencontentv3",
"uquopt",
"sunoupsell",
"gndlogcf",
"flxvsearch",
"noknowimg",
"eredirecturl"
]
creative_classic = [
"nlu_direct_response_filter",
"deepleo",
"disable_emoji_spoken_text",
"responsible_ai_policy_235",
"enablemm",
"dv3sugg",
"autosave",
"iyxapbing",
"iycapbing",
"h3imaginative",
"clgalileo",
"gencontentv3",
"uquopt",
"sunoupsell",
"gndlogcf",
"flxvsearch",
"noknowimg",
"eredirecturl"
]
balanced = [
"fluxcopilot",
"nojbf",
"nlu_direct_response_filter",
"deepleo",
"disable_emoji_spoken_text",
"responsible_ai_policy_235",
"enablemm",
"dv3sugg",
"autosave",
"iyxapbing",
"iycapbing",
"dgencontentv3",
"nointernalsugg",
"disable_telemetry",
"machine_affinity",
"streamf",
"codeint",
"langdtwb",
"fdwtlst",
"fluxprod",
"eredirecturl",
"deuct3",
# Balance
"enable_user_consent",
"fluxmemcst",
"galileo",
"gldcl1p"
"saharagenconv5",
"dc1mncp",
"uquopt",
"sunoupsell",
"crkt2t",
"immslots",
"cpproname",
"vidtoppb",
"gptv1desc2",
"eredirecturl"
]
precise = [
"fluxcopilot",
"nojbf",
"nlu_direct_response_filter",
"deepleo",
"disable_emoji_spoken_text",
"responsible_ai_policy_235",
"enablemm",
"dv3sugg",
"autosave",
"iyxapbing",
"iycapbing",
"dgencontentv3",
"nointernalsugg",
"disable_telemetry",
"machine_affinity",
"streamf",
"codeint",
"langdtwb",
"fdwtlst",
"fluxprod",
"eredirecturl",
"deuct3",
# Precise
"h3precise"
"enable_user_consent",
"fluxmemcst",
"h3precise",
"clgalileo",
"uquopt",
"sunoupsell",
"crkt2t",
"flxvsearchans",
"noknowimg",
"eredirecturl"
]


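For orientation, a minimal sketch of how the new creative_classic style can be selected from client code, mirroring the manual test further down in this PR. The cookie file name and prompt are placeholders, and closing the bot via close() is assumed from the library's async API:

```python
import asyncio
import json
from pathlib import Path

from re_edge_gpt import Chatbot, ConversationStyle


async def ask_creative_classic() -> None:
    # Cookies exported from copilot.microsoft.com; the file name is a placeholder.
    cookies = json.loads(Path("copilot_cookies.json").read_text(encoding="utf-8"))
    bot = await Chatbot.create(cookies=cookies)
    try:
        response = await bot.ask(
            prompt="Hello",
            conversation_style=ConversationStyle.creative_classic,
            simplify_response=True,
        )
        # ensure_ascii=False keeps non-ASCII characters readable in the output.
        print(json.dumps(response, indent=2, ensure_ascii=False))
    finally:
        await bot.close()


if __name__ == "__main__":
    asyncio.run(ask_creative_classic())
```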
2 changes: 1 addition & 1 deletion re_edge_gpt/chat/re_edge_gpt.py
@@ -19,7 +19,7 @@ class Chatbot:
def __init__(
self,
proxy: str | None = None,
cookies: list[dict] | None = None
cookies: list[dict] | None = None,
) -> None:
self.proxy: str | None = proxy
self.chat_hub: ChatHub = ChatHub(
64 changes: 41 additions & 23 deletions re_edge_gpt/chat/request.py
@@ -55,47 +55,64 @@ def update(

# Get current time
timestamp = datetime.now().strftime("%Y-%m-%dT%H:%M:%S") + offset_string
style = conversation_style.name.capitalize() # Make first letter uppercase
not_in_style = {"creative_classic": "CreativeClassic"}
self.struct = {
"arguments": [
{
"source": "cib",
"source": "cib-ccp",
"optionsSets": conversation_style.value,
"allowedMessageTypes": [
"ActionRequest",
"Chat",
"ConfirmationCard",
"Context",
"InternalSearchQuery",
"InternalSearchResult",
"Disengaged",
"InternalLoaderMessage",
"Progress",
"RenderCardRequest",
"RenderContentRequest",
"AdsQuery",
"SemanticSerp",
"GenerateContentQuery",
"SearchQuery",
"GeneratedCode",
"InternalTasksMessage"
],
"sliceIds": [
"schurmsg",
"ntbkcf",
"rankcf",
"bgstreamcf",
"cmcallapptf",
"vnextvoicecf",
"tts5cf",
"abv2mobcf",
"ctvismctrl",
"suppsm240rev10-t",
"suppsm240-t",
"translrefctrl",
"1215perscs0",
"0212bops0",
"116langwb",
"0112wtlsts0",
"118wcsmw",
"1201reasons0",
"0116trimgd",
"cacfastapis"
"disbotgrtcf",
"ntbkgold2",
"ntbkf1",
"qna10",
"thdnsrch",
"slangcf",
"vnextr100",
"vnext100",
"vnextvoice",
"rdlidncf",
"semserpnomlbg",
"semserpnoml",
"srchqryfix",
"cacntjndcae",
"edgenorrwrap",
"cmcpupsalltf",
"sunoupsell",
"313dynaplfs0",
"0312hrthrots0",
"0317immslotsc",
"228pyfiles0",
"kcclickthrucf",
"sportsatis0",
"0317dc1pro",
"defgrey",
"ssadsv4chtiidnoifbm",
"adsltmdsc",
"ssadsv2nocm"
],
"verbosity": "verbose",
"scenario":"SERP",
"scenario": "SERP",
"traceId": get_ran_hex(32),
"isStartOfSession": self.invocation_id == 3,
"message": {
@@ -113,7 +130,8 @@
"imageUrl": image_url if image_url else None,
"originalImageUrl": image_url if image_url else None,
},
"tone": conversation_style.name.capitalize(), # Make first letter uppercase
"tone": style if style not in not_in_style.keys()
else not_in_style.get(style),
"requestId": message_id,
"conversationSignature": self.conversation_signature,
"participant": {
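The tone handling added above maps the conversation style's enum name to the label the service expects: most styles only need their first letter capitalised, while creative_classic gets an explicit CamelCase override. A small self-contained sketch of that idea, keyed on the raw enum name for simplicity (the merged code keys the lookup slightly differently):

```python
from enum import Enum


class Style(Enum):
    # Illustrative stand-in for ConversationStyle; option lists omitted for brevity.
    creative = 1
    creative_classic = 2
    balanced = 3
    precise = 4


# Styles whose tone label is not simply name.capitalize().
TONE_OVERRIDES = {"creative_classic": "CreativeClassic"}


def tone_for(style: Style) -> str:
    # Fall back to the capitalised enum name when no override is defined.
    return TONE_OVERRIDES.get(style.name, style.name.capitalize())


print(tone_for(Style.balanced))          # Balanced
print(tone_for(Style.creative_classic))  # CreativeClassic
```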
12 changes: 5 additions & 7 deletions test/unit_test/back-end/manual_test/test_bot_manual.py
@@ -1,15 +1,13 @@
import asyncio
import json
# If you are using jupyter pls install this package
# from nest_asyncio import apply
from pathlib import Path

from re_edge_gpt import Chatbot
from re_edge_gpt import ConversationStyle


# If you are using jupyter pls install this package
# from nest_asyncio import apply


async def test_ask() -> None:
bot = None
try:
@@ -22,10 +20,10 @@ async def test_ask() -> None:
str(Path(str(Path.cwd()) + "/copilot_cookies.json")), encoding="utf-8").read())
bot = await Chatbot.create(cookies=cookies, mode=mode)
response = await bot.ask(
prompt="HELLO",
conversation_style=ConversationStyle.balanced,
prompt="What version u are using GPT-4 turbo?",
conversation_style=ConversationStyle.creative_classic,
simplify_response=True,
search_result=True
search_result=True,
)
# If you are using non ascii char you need set ensure_ascii=False
print(json.dumps(response, indent=2, ensure_ascii=False))
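The commented-out nest_asyncio import above is the usual workaround for running this test inside Jupyter, where an event loop is already running. A minimal sketch of that usage; nest_asyncio is an extra dependency, not part of this package:

```python
import asyncio

import nest_asyncio  # pip install nest_asyncio

# Patch the already-running notebook loop so asyncio.run() can be re-entered.
nest_asyncio.apply()


async def demo() -> None:
    await asyncio.sleep(0)


asyncio.run(demo())
```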
@@ -1,16 +1,21 @@
import asyncio
import shutil
import ssl
import sys
from pathlib import Path

import certifi

from re_edge_gpt import ImageGenAsync

# create a temporary output directory for testing purposes
test_output_dir = "test_output"
# download a test image
test_image_url = "https://picsum.photos/200"
auth_cooker = open("bing_cookies.txt", "r+").read()
async_gen = ImageGenAsync(auth_cookie=auth_cooker)
ssl_context = ssl.create_default_context()
ssl_context.load_verify_locations(certifi.where())
async_gen = ImageGenAsync(auth_cookie=auth_cooker, proxy="http://[email protected]:8012")


# Generate image list async
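For context, a rough sketch of how the generator configured above is typically driven. The get_images()/save_images() coroutine names are assumptions based on the package's BingImageCreator-style image API and should be checked against the installed release; the cookie file and prompt are placeholders:

```python
import asyncio

from re_edge_gpt import ImageGenAsync

# Placeholder: read the Bing auth cookie the same way the test above does.
auth_cookie = open("bing_cookies.txt", "r", encoding="utf-8").read()
image_gen = ImageGenAsync(auth_cookie=auth_cookie)


async def generate(prompt: str) -> None:
    # Coroutine names assumed; verify against the installed re_edge_gpt version.
    links = await image_gen.get_images(prompt)
    await image_gen.save_images(links, output_dir="test_output")


if __name__ == "__main__":
    asyncio.run(generate("a lighthouse at sunset"))
```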