import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
import type { ChatOptions, ChatResponse, ChatMessage } from './abstract.classes.multimodal.js';
import type { ImageBlockParam, TextBlockParam } from '@anthropic-ai/sdk/resources/messages';

// Union of the Anthropic SDK's *request* content-block types; used to build
// multimodal (text + image) message payloads for vision/document calls.
type ContentBlock = ImageBlockParam | TextBlockParam;

/**
 * Constructor options for {@link AnthropicProvider}.
 */
export interface IAnthropicProviderOptions {
  // API key used to authenticate against the Anthropic API.
  anthropicToken: string;
}

/**
 * Multimodal model provider backed by the Anthropic Messages API.
 * Supports chat (sync + streaming), vision, and PDF document analysis;
 * audio is not available from Anthropic.
 */
export class AnthropicProvider extends MultiModalModel {
  // Single model used for every request in this provider.
  private static readonly MODEL = 'claude-3-opus-20240229';

  private options: IAnthropicProviderOptions;
  public anthropicApiClient: plugins.anthropic.default;

  constructor(optionsArg: IAnthropicProviderOptions) {
    super();
    this.options = optionsArg; // Ensure the token is stored for start()
  }

  /**
   * Instantiates the Anthropic SDK client with the configured API token.
   * Must be called before any chat/vision/document method.
   */
  async start() {
    this.anthropicApiClient = new plugins.anthropic.default({
      apiKey: this.options.anthropicToken,
    });
  }

  /** The SDK client holds no resources that need explicit teardown. */
  async stop() {}

  /**
   * Concatenates the text of all text blocks in an Anthropic response.
   * Non-text blocks (e.g. tool use) are skipped.
   */
  private extractTextContent(blocks: Array<{ type: string; text?: string }>): string {
    let message = '';
    for (const block of blocks) {
      if (typeof block.text === 'string') {
        message += block.text;
      }
    }
    return message;
  }

  /**
   * Streams a chat exchange: the input stream carries newline-delimited JSON
   * messages ({ role, content }); the output stream emits the assistant's
   * text deltas as they arrive from Anthropic.
   */
  public async chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>> {
    // Create a TextDecoder to handle incoming chunks
    const decoder = new TextDecoder();
    let buffer = '';
    let currentMessage: { role: 'user' | 'assistant'; content: string } | null = null;

    // BUGFIX: inside the transformer callbacks below, `this` is the transformer
    // object, not this provider instance — capture the client in a closure.
    const anthropicApiClient = this.anthropicApiClient;

    // Create a TransformStream to process the input
    const transform = new TransformStream<Uint8Array, string>({
      async transform(chunk, controller) {
        buffer += decoder.decode(chunk, { stream: true });

        // Consume every complete newline-delimited JSON message in the buffer.
        while (true) {
          const newlineIndex = buffer.indexOf('\n');
          if (newlineIndex === -1) break;

          const line = buffer.slice(0, newlineIndex);
          buffer = buffer.slice(newlineIndex + 1);

          if (line.trim()) {
            try {
              const message = JSON.parse(line);
              currentMessage = {
                // Narrow to the roles the Messages API accepts.
                role: message.role === 'assistant' ? 'assistant' : 'user',
                content: message.content || '',
              };
            } catch (e) {
              console.error('Failed to parse message:', e);
            }
          }
        }

        // If we have a complete message, send it to Anthropic
        if (currentMessage) {
          const stream = await anthropicApiClient.messages.create({
            model: AnthropicProvider.MODEL,
            messages: [{ role: currentMessage.role, content: currentMessage.content }],
            system: '',
            stream: true,
            max_tokens: 4000,
          });

          // Only content_block_delta events with a text_delta carry text.
          for await (const event of stream) {
            if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
              controller.enqueue(event.delta.text);
            }
          }

          currentMessage = null;
        }
      },

      flush(controller) {
        // Best-effort: emit any trailing (un-terminated) JSON message's content.
        if (buffer) {
          try {
            const message = JSON.parse(buffer);
            controller.enqueue(message.content || '');
          } catch (e) {
            console.error('Failed to parse remaining buffer:', e);
          }
        }
      }
    });

    // Connect the input to our transform stream
    return input.pipeThrough(transform);
  }

  /**
   * Synchronous (non-streaming) chat completion.
   * @returns the assistant's reply with all text blocks concatenated.
   */
  public async chat(optionsArg: ChatOptions): Promise<ChatResponse> {
    // Convert message history to Anthropic format
    const messages = optionsArg.messageHistory.map(msg => ({
      role: msg.role === 'assistant' ? 'assistant' as const : 'user' as const,
      content: msg.content
    }));

    const result = await this.anthropicApiClient.messages.create({
      model: AnthropicProvider.MODEL,
      system: optionsArg.systemMessage,
      messages: [
        ...messages,
        { role: 'user' as const, content: optionsArg.userMessage }
      ],
      max_tokens: 4000,
    });

    return {
      role: 'assistant' as const,
      message: this.extractTextContent(result.content),
    };
  }

  /**
   * Not supported: Anthropic exposes no text-to-speech API.
   * @throws always
   */
  public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
    throw new Error('Audio generation is not yet supported by Anthropic.');
  }

  /**
   * Answers a prompt about a single image.
   * NOTE(review): the image is always labeled image/jpeg — callers passing
   * PNG/WebP data may be rejected by the API; confirm expected input format.
   */
  public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
    const base64Image = optionsArg.image.toString('base64');

    const content: ContentBlock[] = [
      {
        type: 'text',
        text: optionsArg.prompt
      },
      {
        type: 'image',
        source: {
          type: 'base64',
          media_type: 'image/jpeg',
          data: base64Image
        }
      }
    ];

    const result = await this.anthropicApiClient.messages.create({
      model: AnthropicProvider.MODEL,
      messages: [{
        role: 'user',
        content
      }],
      max_tokens: 1024
    });

    return this.extractTextContent(result.content);
  }

  /**
   * Answers a question about one or more PDF documents by rasterizing each
   * page to an image (via SmartPdf) and sending the pages alongside the
   * user message.
   */
  public async document(optionsArg: {
    systemMessage: string;
    userMessage: string;
    pdfDocuments: Uint8Array[];
    messageHistory: ChatMessage[];
  }): Promise<{ message: any }> {
    // Convert PDF documents to images using SmartPDF
    const smartpdfInstance = new plugins.smartpdf.SmartPdf();
    let documentImageBytesArray: Uint8Array[] = [];

    for (const pdfDocument of optionsArg.pdfDocuments) {
      const documentImageArray = await smartpdfInstance.convertPDFToPngBytes(pdfDocument);
      documentImageBytesArray = documentImageBytesArray.concat(documentImageArray);
    }

    // Convert message history to Anthropic format
    const messages = optionsArg.messageHistory.map(msg => ({
      role: msg.role === 'assistant' ? 'assistant' as const : 'user' as const,
      content: msg.content
    }));

    // Create content array with text and images
    const content: ContentBlock[] = [
      {
        type: 'text',
        text: optionsArg.userMessage
      }
    ];

    // Add each document page as an image.
    // NOTE(review): pages come back as PNG bytes but are labeled image/jpeg —
    // verify whether the API validates the media type against the payload.
    for (const imageBytes of documentImageBytesArray) {
      content.push({
        type: 'image',
        source: {
          type: 'base64',
          media_type: 'image/jpeg',
          data: Buffer.from(imageBytes).toString('base64')
        }
      });
    }

    const result = await this.anthropicApiClient.messages.create({
      model: AnthropicProvider.MODEL,
      system: optionsArg.systemMessage,
      messages: [
        ...messages,
        { role: 'user', content }
      ],
      max_tokens: 4096
    });

    return {
      message: {
        role: 'assistant',
        content: this.extractTextContent(result.content)
      }
    };
  }
}