fmt: cargo fmt

Keisuke Hirata 2026-01-07 22:04:44 +09:00
parent bb73dc6a45
commit 1e126c1698
20 changed files with 263 additions and 227 deletions

View File

@@ -6,7 +6,7 @@
use proc_macro::TokenStream;
use quote::{format_ident, quote};
use syn::{
parse_macro_input, Attribute, FnArg, ImplItem, ItemImpl, Lit, Meta, Pat, ReturnType, Type,
Attribute, FnArg, ImplItem, ItemImpl, Lit, Meta, Pat, ReturnType, Type, parse_macro_input,
};
/// A macro attached to an `impl` block; it generates tools from the methods inside that carry the `#[tool]` attribute.
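For orientation, a minimal usage sketch of the macro described above. The struct name and method are hypothetical; the signature rules and the schemars import are inferred from the macro tests later in this commit.

// Imports required for macro expansion (as in the macro tests)
use schemars;
use worker_macros::tool_registry;

struct Greeter;

#[tool_registry]
impl Greeter {
    /// Adds a greeting to the message.
    #[tool]
    async fn greet(&self, message: String) -> String {
        format!("Hello, {}!", message)
    }
}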

View File

@@ -127,7 +127,10 @@ pub trait WorkerHook: Send + Sync {
/// After tool execution
///
/// The result can be rewritten or hidden here.
async fn after_tool_call(&self, _tool_result: &mut ToolResult) -> Result<ControlFlow, HookError> {
async fn after_tool_call(
&self,
_tool_result: &mut ToolResult,
) -> Result<ControlFlow, HookError> {
Ok(ControlFlow::Continue)
}
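As a concrete sketch of that contract, a hook that rewrites or hides a result might look like the following. RedactingHook and the "SECRET" marker are hypothetical; the trait items follow the signature above.

use async_trait::async_trait;
use worker_types::{ControlFlow, HookError, ToolResult, WorkerHook};

struct RedactingHook;

#[async_trait]
impl WorkerHook for RedactingHook {
    async fn after_tool_call(
        &self,
        tool_result: &mut ToolResult,
    ) -> Result<ControlFlow, HookError> {
        // Hide sensitive output before it is fed back to the model.
        if tool_result.content.contains("SECRET") {
            tool_result.content = "[redacted]".to_string();
        }
        Ok(ControlFlow::Continue)
    }
}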

View File

@@ -54,7 +54,10 @@ pub enum ContentPart {
},
/// Tool result
#[serde(rename = "tool_result")]
ToolResult { tool_use_id: String, content: String },
ToolResult {
tool_use_id: String,
content: String,
},
}
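Assuming ContentPart is internally tagged on a "type" field (the tag attribute sits outside this hunk), the renamed variant would serialize roughly as:

// {"type":"tool_result","tool_use_id":"toolu_123","content":"Sunny, 22°C"}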
impl Message {

View File

@@ -3,9 +3,7 @@
//! Designed as a thin wrapper over the Timeline layer's Handler mechanism,
//! enabling streaming display to the UI and real-time feedback.
use crate::{
ErrorEvent, StatusEvent, TextBlockEvent, ToolCall, ToolUseBlockEvent, UsageEvent,
};
use crate::{ErrorEvent, StatusEvent, TextBlockEvent, ToolCall, ToolUseBlockEvent, UsageEvent};
// =============================================================================
// WorkerSubscriber Trait
@@ -74,7 +72,11 @@ pub trait WorkerSubscriber: Send {
///
/// Has a Start/InputJsonDelta/Stop lifecycle.
#[allow(unused_variables)]
fn on_tool_use_block(&mut self, scope: &mut Self::ToolUseBlockScope, event: &ToolUseBlockEvent) {
fn on_tool_use_block(
&mut self,
scope: &mut Self::ToolUseBlockScope,
event: &ToolUseBlockEvent,
) {
}
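A hedged reading of that lifecycle, using the one Event constructor that appears elsewhere in this commit; the delta and stop counterparts are assumptions.

// 1. Start          -> Event::tool_use_start(index, id, name) opens the block
// 2. InputJsonDelta -> streams the tool's argument JSON in fragments
// 3. Stop           -> closes the block; on_tool_use_block observes each phase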
// =========================================================================

View File

@@ -111,8 +111,8 @@ impl Handler<UsageKind> for UsageTracker {
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Read the API key from the environment variable
let api_key = std::env::var("GEMINI_API_KEY")
.expect("GEMINI_API_KEY environment variable must be set");
let api_key =
std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable must be set");
println!("=== Gemini LLM Client + Timeline Integration Example ===\n");

View File

@@ -16,9 +16,6 @@
//! ANTHROPIC_API_KEY=your-key cargo run --example record_test_fixtures -- --all
//! ```
mod recorder;
mod scenarios;
@@ -82,7 +79,8 @@ async fn run_scenario_with_openai(
subdir: &str,
model: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
let api_key = std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY environment variable must be set");
let api_key =
std::env::var("OPENAI_API_KEY").expect("OPENAI_API_KEY environment variable must be set");
let model = model.as_deref().unwrap_or("gpt-4o");
let client = OpenAIClient::new(&api_key, model);
@@ -125,8 +123,8 @@ async fn run_scenario_with_gemini(
subdir: &str,
model: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
let api_key = std::env::var("GEMINI_API_KEY")
.expect("GEMINI_API_KEY environment variable must be set");
let api_key =
std::env::var("GEMINI_API_KEY").expect("GEMINI_API_KEY environment variable must be set");
let model = model.as_deref().unwrap_or("gemini-2.0-flash");
let client = GeminiClient::new(&api_key, model);
@@ -142,9 +140,6 @@ async fn run_scenario_with_gemini(
Ok(())
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
dotenv::dotenv().ok();
@@ -202,10 +197,18 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Run them in a simple loop here
for scenario in scenarios_to_run {
match args.client {
ClientType::Anthropic => run_scenario_with_anthropic(&scenario, subdir, args.model.clone()).await?,
ClientType::Gemini => run_scenario_with_gemini(&scenario, subdir, args.model.clone()).await?,
ClientType::Openai => run_scenario_with_openai(&scenario, subdir, args.model.clone()).await?,
ClientType::Ollama => run_scenario_with_ollama(&scenario, subdir, args.model.clone()).await?,
ClientType::Anthropic => {
run_scenario_with_anthropic(&scenario, subdir, args.model.clone()).await?
}
ClientType::Gemini => {
run_scenario_with_gemini(&scenario, subdir, args.model.clone()).await?
}
ClientType::Openai => {
run_scenario_with_openai(&scenario, subdir, args.model.clone()).await?
}
ClientType::Ollama => {
run_scenario_with_ollama(&scenario, subdir, args.model.clone()).await?
}
}
}

View File

@@ -38,14 +38,14 @@ use tracing_subscriber::EnvFilter;
use clap::{Parser, ValueEnum};
use worker::{
Handler, TextBlockEvent, TextBlockKind, ToolUseBlockEvent, ToolUseBlockKind, Worker,
llm_client::{
LlmClient,
providers::{
anthropic::AnthropicClient, gemini::GeminiClient, ollama::OllamaClient,
openai::OpenAIClient,
},
LlmClient,
},
Handler, TextBlockEvent, TextBlockKind, ToolUseBlockEvent, ToolUseBlockKind, Worker,
};
use worker_macros::tool_registry;
use worker_types::Message;
@@ -310,8 +310,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
// Initialize logging
// RUST_LOG=debug cargo run --example worker_cli ... shows detailed logs
// Defaults to the warn level; can be overridden via the RUST_LOG environment variable
let filter = EnvFilter::try_from_default_env()
.unwrap_or_else(|_| EnvFilter::new("warn"));
let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("warn"));
tracing_subscriber::fmt()
.with_env_filter(filter)

View File

@@ -6,7 +6,7 @@ use std::pin::Pin;
use async_trait::async_trait;
use eventsource_stream::Eventsource;
use futures::{future::ready, Stream, StreamExt, TryStreamExt};
use futures::{Stream, StreamExt, TryStreamExt, future::ready};
use reqwest::header::{CONTENT_TYPE, HeaderMap, HeaderValue};
use worker_types::Event;
@@ -178,7 +178,6 @@ impl LlmClient for AnthropicClient {
}
}
#[cfg(test)]
mod tests {
use super::*;

View File

@@ -10,9 +10,7 @@ use futures::Stream;
use worker_types::Event;
use crate::llm_client::{
ClientError, LlmClient, Request,
providers::openai::OpenAIClient,
scheme::openai::OpenAIScheme,
ClientError, LlmClient, Request, providers::openai::OpenAIClient, scheme::openai::OpenAIScheme,
};
/// Ollama client

View File

@@ -159,7 +159,8 @@ impl LlmClient for OpenAIClient {
.map_err(|e| std::io::Error::other(e));
let event_stream = byte_stream.eventsource();
let stream = event_stream.map(move |result| {
let stream = event_stream
.map(move |result| {
match result {
Ok(event) => {
// Parse the SSE event

View File

@@ -127,12 +127,11 @@ impl GeminiScheme {
return Ok(None);
}
let response: GenerateContentResponse = serde_json::from_str(data).map_err(|e| {
ClientError::Api {
let response: GenerateContentResponse =
serde_json::from_str(data).map_err(|e| ClientError::Api {
status: None,
code: Some("parse_error".to_string()),
message: format!("Failed to parse Gemini SSE data: {} -> {}", e, data),
}
})?;
let mut events = Vec::new();
@@ -155,10 +154,7 @@
if !text.is_empty() {
// Gemini does not send an explicit BlockStart, so we emit TextDelta
// directly and the Timeline implicitly handles the block start
events.push(Event::text_delta(
part_index,
text.clone(),
));
events.push(Event::text_delta(part_index, text.clone()));
}
}
@@ -240,7 +236,8 @@ mod tests {
#[test]
fn test_parse_text_response() {
let scheme = GeminiScheme::new();
let data = r#"{"candidates":[{"content":{"parts":[{"text":"Hello"}],"role":"model"},"index":0}]}"#;
let data =
r#"{"candidates":[{"content":{"parts":[{"text":"Hello"}],"role":"model"},"index":0}]}"#;
let events = scheme.parse_event(data).unwrap().unwrap();
assert_eq!(events.len(), 1);
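For contrast with the text fixture above, a function-call chunk would look roughly like this; the shape is inferred from the GeminiPart and GeminiFunctionCall serde definitions in the next file, so the exact candidate framing is an assumption.

// data: {"candidates":[{"content":{"parts":[{"functionCall":
//   {"name":"get_weather","args":{"city":"Tokyo"}}}],"role":"model"},"index":0}]}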

View File

@@ -46,9 +46,7 @@ pub(crate) struct GeminiContent {
#[serde(untagged)]
pub(crate) enum GeminiPart {
/// Text part
Text {
text: String,
},
Text { text: String },
/// Function-call part
FunctionCall {
#[serde(rename = "functionCall")]
@@ -160,11 +158,7 @@ impl GeminiScheme {
vec![]
} else {
vec![GeminiTool {
function_declarations: request
.tools
.iter()
.map(|t| self.convert_tool(t))
.collect(),
function_declarations: request.tools.iter().map(|t| self.convert_tool(t)).collect(),
}]
};
@@ -224,19 +218,16 @@ impl GeminiScheme {
},
}]
}
MessageContent::Parts(parts) => {
parts
MessageContent::Parts(parts) => parts
.iter()
.map(|p| match p {
ContentPart::Text { text } => GeminiPart::Text { text: text.clone() },
ContentPart::ToolUse { id: _, name, input } => {
GeminiPart::FunctionCall {
ContentPart::ToolUse { id: _, name, input } => GeminiPart::FunctionCall {
function_call: GeminiFunctionCall {
name: name.clone(),
args: input.clone(),
},
}
}
},
ContentPart::ToolResult {
tool_use_id,
content,
@@ -250,8 +241,7 @@ impl GeminiScheme {
},
},
})
.collect()
}
.collect(),
};
GeminiContent {
@@ -306,16 +296,17 @@ mod tests {
assert_eq!(gemini_req.tools.len(), 1);
assert_eq!(gemini_req.tools[0].function_declarations.len(), 1);
assert_eq!(gemini_req.tools[0].function_declarations[0].name, "get_weather");
assert_eq!(
gemini_req.tools[0].function_declarations[0].name,
"get_weather"
);
assert!(gemini_req.tool_config.is_some());
}
#[test]
fn test_assistant_role_is_model() {
let scheme = GeminiScheme::new();
let request = Request::new()
.user("Hello")
.assistant("Hi there!");
let request = Request::new().user("Hello").assistant("Hi there!");
let gemini_req = scheme.build_request(&request);

View File

@@ -69,8 +69,8 @@ impl OpenAIScheme {
return Ok(None);
}
let chunk: ChatCompletionChunk = serde_json::from_str(data)
.map_err(|e| ClientError::Api {
let chunk: ChatCompletionChunk =
serde_json::from_str(data).map_err(|e| ClientError::Api {
status: None,
code: Some("parse_error".to_string()),
message: format!("Failed to parse SSE data: {} -> {}", e, data),
@@ -102,7 +102,11 @@ impl OpenAIScheme {
for tool_call in tool_calls {
// Start of tool call (has ID)
if let Some(id) = tool_call.id {
let name = tool_call.function.as_ref().and_then(|f| f.name.clone()).unwrap_or_default();
let name = tool_call
.function
.as_ref()
.and_then(|f| f.name.clone())
.unwrap_or_default();
events.push(Event::tool_use_start(tool_call.index, id, name));
}
@@ -126,7 +130,8 @@ impl OpenAIScheme {
_ => Some(StopReason::EndTurn),
};
let is_tool_finish = finish_reason == "tool_calls" || finish_reason == "function_call";
let is_tool_finish =
finish_reason == "tool_calls" || finish_reason == "function_call";
if is_tool_finish {
// Tool call finished
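For reference, the final stream chunk that takes this branch looks roughly like the following; this is a sketch of the OpenAI chat-completions stream shape, with fields other than finish_reason elided.

// data: {"choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]}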

View File

@@ -120,12 +120,7 @@ impl OpenAIScheme {
});
}
messages.extend(
request
.messages
.iter()
.map(|m| self.convert_message(m))
);
messages.extend(request.messages.iter().map(|m| self.convert_message(m)));
let tools = request.tools.iter().map(|t| self.convert_tool(t)).collect();
@@ -143,7 +138,9 @@ impl OpenAIScheme {
top_p: request.config.top_p,
stop: request.config.stop_sequences.clone(),
stream: true,
stream_options: Some(StreamOptions { include_usage: true }),
stream_options: Some(StreamOptions {
include_usage: true,
}),
messages,
tools,
tool_choice: None, // Default to auto if tools are present? Or let API decide (which is auto)
@@ -265,13 +262,10 @@ impl OpenAIScheme {
mod tests {
use super::*;
#[test]
fn test_build_simple_request() {
let scheme = OpenAIScheme::new();
let request = Request::new()
.system("System prompt")
.user("Hello");
let request = Request::new().system("System prompt").user("Hello");
let body = scheme.build_request("gpt-4o", &request);
@@ -303,9 +297,7 @@
#[test]
fn test_build_request_legacy_max_tokens() {
let scheme = OpenAIScheme::new().with_legacy_max_tokens(true);
let request = Request::new()
.user("Hello")
.max_tokens(100);
let request = Request::new().user("Hello").max_tokens(100);
let body = scheme.build_request("llama3", &request);
@@ -317,9 +309,7 @@
#[test]
fn test_build_request_modern_max_tokens() {
let scheme = OpenAIScheme::new(); // Default matches modern (legacy=false)
let request = Request::new()
.user("Hello")
.max_tokens(100);
let request = Request::new().user("Hello").max_tokens(100);
let body = scheme.build_request("gpt-4o", &request);

View File

@@ -4,6 +4,7 @@ use std::sync::{Arc, Mutex};
use futures::StreamExt;
use tracing::{debug, info, trace, warn};
use crate::Timeline;
use crate::llm_client::{ClientError, LlmClient, Request, ToolDefinition};
use crate::subscriber_adapter::{
ErrorSubscriberAdapter, StatusSubscriberAdapter, TextBlockSubscriberAdapter,
@@ -11,7 +12,6 @@ use crate::subscriber_adapter::{
};
use crate::text_block_collector::TextBlockCollector;
use crate::tool_call_collector::ToolCallCollector;
use crate::Timeline;
use worker_types::{
ContentPart, ControlFlow, HookError, Message, MessageContent, Tool, ToolCall, ToolError,
ToolResult, TurnResult, WorkerHook, WorkerSubscriber,
@@ -442,10 +442,7 @@ impl<C: LlmClient> Worker<C> {
}
/// Hooks: on_turn_end
async fn run_on_turn_end_hooks(
&self,
messages: &[Message],
) -> Result<TurnResult, WorkerError> {
async fn run_on_turn_end_hooks(&self, messages: &[Message]) -> Result<TurnResult, WorkerError> {
for hook in &self.hooks {
let result = hook.on_turn_end(messages).await?;
match result {

View File

@@ -3,13 +3,13 @@
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use async_trait::async_trait;
use futures::Stream;
use worker::{Handler, TextBlockEvent, TextBlockKind, Timeline};
use worker::llm_client::{ClientError, LlmClient, Request};
use worker::{Handler, TextBlockEvent, TextBlockKind, Timeline};
use worker_types::{BlockType, DeltaContent, Event};
use std::sync::atomic::{AtomicUsize, Ordering};
@@ -135,7 +135,8 @@ pub fn assert_event_sequence(subdir: &str) {
}
// Find a text-based fixture
let fixture_path = fixtures.iter()
let fixture_path = fixtures
.iter()
.find(|p| p.to_string_lossy().contains("text"))
.unwrap_or(&fixtures[0]);
@@ -184,7 +185,9 @@ pub fn assert_event_sequence(subdir: &str) {
assert!(stop_found, "Should contain BlockStop for Text block");
} else {
if !stop_found {
println!(" [Type: ToolUse] BlockStop detection skipped (not explicitly emitted by scheme)");
println!(
" [Type: ToolUse] BlockStop detection skipped (not explicitly emitted by scheme)"
);
}
}
}
@@ -200,13 +203,23 @@ pub fn assert_usage_tokens(subdir: &str) {
let events = load_events_from_fixture(&fixture);
let usage_events: Vec<_> = events
.iter()
.filter_map(|e| if let Event::Usage(u) = e { Some(u) } else { None })
.filter_map(|e| {
if let Event::Usage(u) = e {
Some(u)
} else {
None
}
})
.collect();
if !usage_events.is_empty() {
let last_usage = usage_events.last().unwrap();
if last_usage.input_tokens.is_some() || last_usage.output_tokens.is_some() {
println!(" Fixture {:?} Usage: {:?}", fixture.file_name(), last_usage);
println!(
" Fixture {:?} Usage: {:?}",
fixture.file_name(),
last_usage
);
return; // Found valid usage
}
}
@@ -221,7 +234,8 @@ pub fn assert_timeline_integration(subdir: &str) {
return;
}
let fixture_path = fixtures.iter()
let fixture_path = fixtures
.iter()
.find(|p| p.to_string_lossy().contains("text"))
.unwrap_or(&fixtures[0]);

View File

@@ -2,13 +2,16 @@
//!
//! Verifies that the Worker executes multiple tools in parallel.
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::time::{Duration, Instant};
use async_trait::async_trait;
use worker::Worker;
use worker_types::{Event, Message, ResponseStatus, StatusEvent, Tool, ToolError, ToolResult, ToolCall, ControlFlow, HookError, WorkerHook};
use worker_types::{
ControlFlow, Event, HookError, Message, ResponseStatus, StatusEvent, Tool, ToolCall, ToolError,
ToolResult, WorkerHook,
};
mod common;
use common::MockLlmClient;
@@ -105,8 +108,6 @@ async fn test_parallel_tool_execution() {
worker.register_tool(tool2);
worker.register_tool(tool3);
let messages = vec![Message::user("Run all tools")];
let start = Instant::now();
@@ -161,7 +162,10 @@ async fn test_before_tool_call_skip() {
#[async_trait]
impl WorkerHook for BlockingHook {
async fn before_tool_call(&self, tool_call: &mut ToolCall) -> Result<ControlFlow, HookError> {
async fn before_tool_call(
&self,
tool_call: &mut ToolCall,
) -> Result<ControlFlow, HookError> {
if tool_call.name == "blocked_tool" {
Ok(ControlFlow::Skip)
} else {
@@ -176,8 +180,16 @@ async fn test_before_tool_call_skip() {
let _result = worker.run(messages).await;
// allowed_tool is called, but blocked_tool is not
assert_eq!(allowed_clone.call_count(), 1, "Allowed tool should be called");
assert_eq!(blocked_clone.call_count(), 0, "Blocked tool should not be called");
assert_eq!(
allowed_clone.call_count(),
1,
"Allowed tool should be called"
);
assert_eq!(
blocked_clone.call_count(),
0,
"Blocked tool should not be called"
);
}
/// Hook: verifies that after_tool_call modifies the result
@@ -212,9 +224,15 @@ async fn test_after_tool_call_modification() {
#[async_trait]
impl Tool for SimpleTool {
fn name(&self) -> &str { "test_tool" }
fn description(&self) -> &str { "Test" }
fn input_schema(&self) -> serde_json::Value { serde_json::json!({}) }
fn name(&self) -> &str {
"test_tool"
}
fn description(&self) -> &str {
"Test"
}
fn input_schema(&self) -> serde_json::Value {
serde_json::json!({})
}
async fn execute(&self, _: &str) -> Result<String, ToolError> {
Ok("Original Result".to_string())
}
@@ -229,7 +247,10 @@ async fn test_after_tool_call_modification() {
#[async_trait]
impl WorkerHook for ModifyingHook {
async fn after_tool_call(&self, tool_result: &mut ToolResult) -> Result<ControlFlow, HookError> {
async fn after_tool_call(
&self,
tool_result: &mut ToolResult,
) -> Result<ControlFlow, HookError> {
tool_result.content = format!("[Modified] {}", tool_result.content);
*self.modified_content.lock().unwrap() = Some(tool_result.content.clone());
Ok(ControlFlow::Continue)
@@ -237,7 +258,9 @@ async fn test_after_tool_call_modification() {
}
let modified_content = Arc::new(std::sync::Mutex::new(None));
worker.add_hook(ModifyingHook { modified_content: modified_content.clone() });
worker.add_hook(ModifyingHook {
modified_content: modified_content.clone(),
});
let messages = vec![Message::user("Test modification")];
let result = worker.run(messages).await;

View File

@@ -2,8 +2,8 @@
//!
//! Verifies the behavior of the `#[tool_registry]` and `#[tool]` macros.
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
// Imports required for macro expansion
use schemars;
@@ -59,12 +59,19 @@ async fn test_basic_tool_generation() {
// Check the description (taken from the doc comment)
let desc = greet_tool.description();
assert!(desc.contains("メッセージに挨拶を追加する"), "Description should contain doc comment: {}", desc);
assert!(
desc.contains("メッセージに挨拶を追加する"),
"Description should contain doc comment: {}",
desc
);
// Check the schema
let schema = greet_tool.input_schema();
println!("Schema: {}", serde_json::to_string_pretty(&schema).unwrap());
assert!(schema.get("properties").is_some(), "Schema should have properties");
assert!(
schema.get("properties").is_some(),
"Schema should have properties"
);
// Execution test
let result = greet_tool.execute(r#"{"message": "World"}"#).await;
@@ -104,7 +111,11 @@ async fn test_no_arguments() {
let result = get_prefix_tool.execute(r#"{}"#).await;
assert!(result.is_ok());
let output = result.unwrap();
assert!(output.contains("TestPrefix"), "Should contain prefix: {}", output);
assert!(
output.contains("TestPrefix"),
"Should contain prefix: {}",
output
);
}
#[tokio::test]
@@ -169,7 +180,11 @@ async fn test_result_return_type_error() {
assert!(result.is_err(), "Should fail for negative value");
let err = result.unwrap_err();
assert!(err.to_string().contains("positive"), "Error should mention positive: {}", err);
assert!(
err.to_string().contains("positive"),
"Error should mention positive: {}",
err
);
}
// =============================================================================

View File

@@ -6,8 +6,8 @@
mod common;
use std::path::Path;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use async_trait::async_trait;
use common::MockLlmClient;
@@ -67,9 +67,7 @@ impl Tool for MockWeatherTool {
let input: serde_json::Value = serde_json::from_str(input_json)
.map_err(|e| ToolError::InvalidArgument(e.to_string()))?;
let city = input["city"]
.as_str()
.unwrap_or("Unknown");
let city = input["city"].as_str().unwrap_or("Unknown");
// Return a mock response
Ok(format!("Weather in {}: Sunny, 22°C", city))
@@ -163,8 +161,6 @@ async fn test_worker_tool_call() {
let tool_for_check = weather_tool.clone();
worker.register_tool(weather_tool);
// Send the messages
let messages = vec![worker_types::Message::user("What's the weather in Tokyo?")];
let _result = worker.run(messages).await;
@@ -212,8 +208,8 @@ async fn test_worker_with_programmatic_events() {
/// Verifies that id, name, and input JSON can be extracted correctly.
#[tokio::test]
async fn test_tool_call_collector_integration() {
use worker::ToolCallCollector;
use worker::Timeline;
use worker::ToolCallCollector;
use worker_types::Event;
// An event sequence containing a ToolUse block
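A hedged sketch of how such a sequence could be built; only tool_use_start appears verbatim in this commit, so the delta and stop constructors are left as comments.

let events = vec![
    // Open the ToolUse block with its id and name.
    Event::tool_use_start(0, "toolu_1".to_string(), "get_weather".to_string()),
    // ... InputJsonDelta fragments carrying {"city":"Tokyo"} ...
    // ... a stop event closing the block ...
];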