
Commit 5d6aecf

remove qwen agent producer
1 parent 803ef5f commit 5d6aecf

File tree

4 files changed (+2, -282 lines)


applications/ColossalChat/coati/distributed/agent/qwen_math_agentic_producer.py

Lines changed: 0 additions & 90 deletions
This file was deleted.

applications/ColossalChat/coati/distributed/agent/qwen_math_agentic_utils.py

Lines changed: 0 additions & 176 deletions
This file was deleted.

applications/ColossalChat/coati/distributed/launch.py

Lines changed: 0 additions & 2 deletions
@@ -5,7 +5,6 @@
 
 import ray
 from coati.distributed.agent.agentic_producer import AgenticProducer
-from coati.distributed.agent.qwen_math_agentic_producer import QwenMathAgenticProducer
 from coati.distributed.agent.tool_worker import ToolWorker
 
 from .consumer import SimpleConsumer
@@ -14,7 +13,6 @@
 
 ALGO_MAP = {"Simple": SimpleConsumer, "GRPO": GRPOConsumer, "DAPO": GRPOConsumer}
 AGENTIC_PRODUCER_MAP = {
-    "QwenMathAgent": QwenMathAgenticProducer,
     "Agentic": AgenticProducer,
 }  # supported agentic producers
 
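For reference, AGENTIC_PRODUCER_MAP is the registry that maps an agentic type name to its producer class; after this commit the generic AgenticProducer is the only entry. The snippet below is a minimal sketch of how such a registry is typically resolved — resolve_agentic_producer and its error handling are illustrative assumptions, not code from launch.py.

    from coati.distributed.agent.agentic_producer import AgenticProducer

    # Registry of supported agentic producers, as left by this commit.
    AGENTIC_PRODUCER_MAP = {
        "Agentic": AgenticProducer,
    }

    def resolve_agentic_producer(agentic_type: str):
        # Hypothetical helper: look up the producer class for the configured
        # agentic type and fail fast on removed or unknown types such as "QwenMathAgent".
        try:
            return AGENTIC_PRODUCER_MAP[agentic_type]
        except KeyError as err:
            raise ValueError(
                f"Unsupported agentic type {agentic_type!r}; choose from {sorted(AGENTIC_PRODUCER_MAP)}"
            ) from err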

applications/ColossalChat/rl_example.py

Lines changed: 2 additions & 14 deletions
@@ -165,7 +165,7 @@
         "--agentic-type",
         type=str,
         default="Agentic",
-        choices=["Agentic", "QwenMathAgent"],
+        choices=["Agentic"],
         help="Agentic model type for agentic training.",
     )
     parser.add_argument(
@@ -372,19 +372,7 @@
     if "agentic" in args.backend:
         assert "vllm" in args.backend, "Agentic backend only supports async-agentic-vllm backends."
         generate_config["n"] = 1  # agentic producer use AsyncProducer which processes one request a time
-        if args.agentic_type == "QwenMathAgent":
-            agentic_config = {
-                "agentic_producer": "QwenMathAgent",
-                "model": args.model,
-                "model_type": "transformers",
-                "generate_cfg": {
-                    "max_input_tokens": args.max_new_tokens + args.max_prompt_tokens,
-                },
-            }
-            agentic_config["generate_cfg"].update(
-                {k: v for k, v in generate_config.items() if k in ["top_k", "top_p", "temperature"]}
-            )
-        elif args.agentic_type == "Agentic":
+        if args.agentic_type == "Agentic":
             generate_config["stop"] = ["<|im_end|>"]
             generate_config["prompt_logprobs"] = 0
             agentic_config = {
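After this change only the generic "Agentic" path survives in rl_example.py. The sketch below restates that surviving branch in a self-contained form; only the lines visible in the hunk above come from the source, while the stand-in variables and the body of agentic_config are illustrative placeholders.

    # Self-contained sketch of the surviving configuration path (illustrative only).
    args_backend = "async-agentic-vllm"  # stand-in for args.backend
    agentic_type = "Agentic"             # stand-in for args.agentic_type
    generate_config = {"temperature": 0.7, "top_p": 0.95}  # illustrative sampling settings

    if "agentic" in args_backend:
        assert "vllm" in args_backend, "Agentic backend only supports async-agentic-vllm backends."
        generate_config["n"] = 1  # the agentic producer uses AsyncProducer, one request at a time
        if agentic_type == "Agentic":
            generate_config["stop"] = ["<|im_end|>"]
            generate_config["prompt_logprobs"] = 0
            agentic_config = {
                # remaining fields are outside the hunk shown above and intentionally omitted
            }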
