{"id":17132,"date":"2025-01-29T14:01:30","date_gmt":"2025-01-29T12:01:30","guid":{"rendered":"https:\/\/spatialworld.fi\/?page_id=17132"},"modified":"2025-02-05T17:35:10","modified_gmt":"2025-02-05T15:35:10","slug":"17076-2-2-3","status":"publish","type":"page","link":"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/","title":{"rendered":"LLM RAG"},"content":{"rendered":"[vc_row type=&#8221;in_container&#8221; full_screen_row_position=&#8221;middle&#8221; column_margin=&#8221;default&#8221; column_direction=&#8221;default&#8221; column_direction_tablet=&#8221;default&#8221; column_direction_phone=&#8221;default&#8221; scene_position=&#8221;center&#8221; text_color=&#8221;dark&#8221; text_align=&#8221;left&#8221; row_border_radius=&#8221;none&#8221; row_border_radius_applies=&#8221;bg&#8221; overflow=&#8221;visible&#8221; overlay_strength=&#8221;0.3&#8243; gradient_direction=&#8221;left_to_right&#8221; shape_divider_position=&#8221;bottom&#8221; bg_image_animation=&#8221;none&#8221;][vc_column column_padding=&#8221;no-extra-padding&#8221; column_padding_tablet=&#8221;inherit&#8221; column_padding_phone=&#8221;inherit&#8221; column_padding_position=&#8221;all&#8221; column_element_direction_desktop=&#8221;default&#8221; column_element_spacing=&#8221;default&#8221; desktop_text_alignment=&#8221;default&#8221; tablet_text_alignment=&#8221;default&#8221; phone_text_alignment=&#8221;default&#8221; background_color_opacity=&#8221;1&#8243; background_hover_color_opacity=&#8221;1&#8243; column_backdrop_filter=&#8221;none&#8221; column_shadow=&#8221;none&#8221; column_border_radius=&#8221;none&#8221; column_link_target=&#8221;_self&#8221; column_position=&#8221;default&#8221; gradient_direction=&#8221;left_to_right&#8221; overlay_strength=&#8221;0.3&#8243; width=&#8221;1\/1&#8243; tablet_width_inherit=&#8221;default&#8221; animation_type=&#8221;default&#8221; bg_image_animation=&#8221;none&#8221; border_type=&#8221;simple&#8221; column_border_width=&#8221;none&#8221; 
column_border_style=&#8221;solid&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<h1 class=\"reader-article-header__title\" dir=\"ltr\"><span data-scaffold-immersive-reader-title=\"\">How to Ask a Local LLM a Question Using Your Own Document Context<\/span><\/h1>\n[\/vc_column_text][\/vc_column][\/vc_row][vc_row type=&#8221;in_container&#8221; full_screen_row_position=&#8221;middle&#8221; column_margin=&#8221;default&#8221; column_direction=&#8221;default&#8221; column_direction_tablet=&#8221;default&#8221; column_direction_phone=&#8221;default&#8221; scene_position=&#8221;center&#8221; text_color=&#8221;dark&#8221; text_align=&#8221;left&#8221; row_border_radius=&#8221;none&#8221; row_border_radius_applies=&#8221;bg&#8221; overflow=&#8221;visible&#8221; overlay_strength=&#8221;0.3&#8243; gradient_direction=&#8221;left_to_right&#8221; shape_divider_position=&#8221;bottom&#8221; bg_image_animation=&#8221;none&#8221;][vc_column column_padding=&#8221;no-extra-padding&#8221; column_padding_tablet=&#8221;inherit&#8221; column_padding_phone=&#8221;inherit&#8221; column_padding_position=&#8221;all&#8221; column_element_direction_desktop=&#8221;default&#8221; column_element_spacing=&#8221;default&#8221; desktop_text_alignment=&#8221;default&#8221; tablet_text_alignment=&#8221;default&#8221; phone_text_alignment=&#8221;default&#8221; background_color_opacity=&#8221;1&#8243; background_hover_color_opacity=&#8221;1&#8243; column_backdrop_filter=&#8221;none&#8221; column_shadow=&#8221;none&#8221; column_border_radius=&#8221;none&#8221; column_link_target=&#8221;_self&#8221; column_position=&#8221;default&#8221; gradient_direction=&#8221;left_to_right&#8221; overlay_strength=&#8221;0.3&#8243; width=&#8221;1\/1&#8243; tablet_width_inherit=&#8221;default&#8221; animation_type=&#8221;default&#8221; bg_image_animation=&#8221;none&#8221; border_type=&#8221;simple&#8221; column_border_width=&#8221;none&#8221; 
column_border_style=&#8221;solid&#8221;][image_with_animation image_url=&#8221;17216&#8243; image_size=&#8221;medium_large&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;75%&#8221; max_width_mobile=&#8221;default&#8221;][\/vc_column][\/vc_row][vc_row type=&#8221;in_container&#8221; full_screen_row_position=&#8221;middle&#8221; column_margin=&#8221;default&#8221; column_direction=&#8221;default&#8221; column_direction_tablet=&#8221;default&#8221; column_direction_phone=&#8221;default&#8221; scene_position=&#8221;center&#8221; text_color=&#8221;dark&#8221; text_align=&#8221;left&#8221; row_border_radius=&#8221;none&#8221; row_border_radius_applies=&#8221;bg&#8221; overflow=&#8221;visible&#8221; overlay_strength=&#8221;0.3&#8243; gradient_direction=&#8221;left_to_right&#8221; shape_divider_position=&#8221;bottom&#8221; bg_image_animation=&#8221;none&#8221;][vc_column column_padding=&#8221;no-extra-padding&#8221; column_padding_tablet=&#8221;inherit&#8221; column_padding_phone=&#8221;inherit&#8221; column_padding_position=&#8221;all&#8221; column_element_direction_desktop=&#8221;default&#8221; column_element_spacing=&#8221;default&#8221; desktop_text_alignment=&#8221;default&#8221; tablet_text_alignment=&#8221;default&#8221; phone_text_alignment=&#8221;default&#8221; background_color_opacity=&#8221;1&#8243; background_hover_color_opacity=&#8221;1&#8243; column_backdrop_filter=&#8221;none&#8221; column_shadow=&#8221;none&#8221; column_border_radius=&#8221;none&#8221; column_link_target=&#8221;_self&#8221; column_position=&#8221;default&#8221; gradient_direction=&#8221;left_to_right&#8221; overlay_strength=&#8221;0.3&#8243; width=&#8221;1\/1&#8243; tablet_width_inherit=&#8221;default&#8221; 
animation_type=&#8221;default&#8221; bg_image_animation=&#8221;none&#8221; border_type=&#8221;simple&#8221; column_border_width=&#8221;none&#8221; column_border_style=&#8221;solid&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<h3 id=\"ember51\" class=\"ember-view reader-text-block__heading-3\">Exploring Retrieval-Augmented Generation (RAG) with FME, DuckDB, and Ollama<\/h3>\n<p id=\"ember52\" class=\"ember-view reader-text-block__paragraph\">RAG, or Retrieval-Augmented Generation, bridges the gap between AI models and your unique data. By retrieving relevant information from your documents and feeding it to an AI model, you can generate more accurate, context-specific responses. This tutorial demonstrates how to use FME to create a low-code RAG workflow that allows you to ask a local LLM (like Ollama) questions using your document as the context.<\/p>\n<p id=\"ember53\" class=\"ember-view reader-text-block__paragraph\">We\u2019ll guide you through these steps:<\/p>\n<ol>\n<li>Extracting text from a document.<\/li>\n<li>Preparing the text.<\/li>\n<li>Representing the text in a vector space.<\/li>\n<li>Storing and analyzing it in a database.<\/li>\n<li>Retrieving relevant content.<\/li>\n<li>Using the content to query an AI model.<\/li>\n<\/ol>\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<h3 id=\"ember55\" class=\"ember-view reader-text-block__heading-3\">Our Example: Asking About the ALADIN Model<\/h3>\n<p id=\"ember56\" class=\"ember-view reader-text-block__paragraph\">Imagine you\u2019re analyzing a scientific article about urban heat islands. 
The question of the day is: <strong>\u201cWhat is the ALADIN model?\u201d<\/strong> &#x1f42b;&#x2600;&#xfe0f;<\/p>\n<p id=\"ember57\" class=\"ember-view reader-text-block__paragraph\">We\u2019ll use a workflow powered by FME to retrieve the relevant sections of the paper and provide a context-aware answer from the local AI model.<\/p>\n<h3 id=\"ember58\" class=\"ember-view reader-text-block__heading-3\">Step 1: Extracting Text from Your Document<\/h3>\n<p id=\"ember59\" class=\"ember-view reader-text-block__paragraph\"><strong>Use Case<\/strong>: Start with a PDF or similar document and convert it into readable text for processing.<\/p>\n<p id=\"ember60\" class=\"ember-view reader-text-block__paragraph\">Our example document:<\/p>\n<ul>\n<li><strong>Title<\/strong>: Assessment of the Urban Impact on Surface and Screen-Level Temperature in the ALADIN-Climate Driven SURFEX Land Surface Model for Budapest<\/li>\n<li><strong>Authors<\/strong>: Zsebeh\u00e1zi &amp; Mah\u00f3<\/li>\n<li><strong>Published in<\/strong>: <em>Atmosphere<\/em>, vol. 12, no. 6, 709<\/li>\n<\/ul>\n<p id=\"ember62\" class=\"ember-view reader-text-block__paragraph\"><strong>Reader Used: PDF<\/strong><\/p>\n<p id=\"ember63\" class=\"ember-view reader-text-block__paragraph\">This feature type represents the extracted text content from PDF files.<\/p>\n<p id=\"ember64\" class=\"ember-view reader-text-block__paragraph\">FME\u2019s no-code design makes connecting to a PDF file straightforward. Simply drag and drop the file, configure the settings, and you\u2019re ready to process text. You could also extract images, text location etc.. 
All of those could feed your internal database.<\/p>\n[\/vc_column_text][image_with_animation image_url=&#8221;17215&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<div class=\"reader-image-block reader-image-block--full-width\" style=\"text-align: center;\">\n<figure class=\"reader-image-block__figure\"><figcaption class=\"reader-image-block__figure-image-caption display-block full-width text-body-small-open t-sans text-align-center t-black--light\"><em>Text extracted, one feature per page<\/em><\/figcaption><\/figure>\n<\/div>\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<h3 id=\"ember66\" class=\"ember-view reader-text-block__heading-3\">Step 2: Preparing the Text for embedding (objects for the database)<\/h3>\n<p id=\"ember67\" class=\"ember-view reader-text-block__paragraph\">Split large text into smaller, meaningful chunks so that relevant pieces of information can be stored in and retrieved from the database. Each chunk will later have its own coordinates in the vector space.<\/p>\n<p id=\"ember68\" class=\"ember-view reader-text-block__paragraph\"><strong>Transformers Used<\/strong>:<\/p>\n<ul>\n<li><strong>StringReplacer<\/strong>: Cleans and normalizes the text.<\/li>\n<li><strong>SubstringExtractor<\/strong>: Splits text into manageable chunks (e.g., 1,000 characters) with overlapping segments for better context retention. We used FME to get the regex right.<\/li>\n<\/ul>\n<p id=\"ember70\" class=\"ember-view reader-text-block__paragraph\">It\u2019s important to balance chunk size and content. 
Smaller chunks are easier for AI to process, but they must still contain enough meaningful information to answer your question effectively.<\/p>\n[\/vc_column_text][image_with_animation image_url=&#8221;17214&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][\/vc_column][\/vc_row][vc_row type=&#8221;in_container&#8221; full_screen_row_position=&#8221;middle&#8221; column_margin=&#8221;default&#8221; column_direction=&#8221;default&#8221; column_direction_tablet=&#8221;default&#8221; column_direction_phone=&#8221;default&#8221; scene_position=&#8221;center&#8221; text_color=&#8221;dark&#8221; text_align=&#8221;left&#8221; row_border_radius=&#8221;none&#8221; row_border_radius_applies=&#8221;bg&#8221; overflow=&#8221;visible&#8221; overlay_strength=&#8221;0.3&#8243; gradient_direction=&#8221;left_to_right&#8221; shape_divider_position=&#8221;bottom&#8221; bg_image_animation=&#8221;none&#8221;][vc_column column_padding=&#8221;no-extra-padding&#8221; column_padding_tablet=&#8221;inherit&#8221; column_padding_phone=&#8221;inherit&#8221; column_padding_position=&#8221;all&#8221; column_element_direction_desktop=&#8221;default&#8221; column_element_spacing=&#8221;default&#8221; desktop_text_alignment=&#8221;default&#8221; tablet_text_alignment=&#8221;default&#8221; phone_text_alignment=&#8221;default&#8221; background_color_opacity=&#8221;1&#8243; background_hover_color_opacity=&#8221;1&#8243; column_backdrop_filter=&#8221;none&#8221; column_shadow=&#8221;none&#8221; column_border_radius=&#8221;none&#8221; column_link_target=&#8221;_self&#8221; column_position=&#8221;default&#8221; gradient_direction=&#8221;left_to_right&#8221; 
overlay_strength=&#8221;0.3&#8243; width=&#8221;1\/1&#8243; tablet_width_inherit=&#8221;default&#8221; animation_type=&#8221;default&#8221; bg_image_animation=&#8221;none&#8221; border_type=&#8221;simple&#8221; column_border_width=&#8221;none&#8221; column_border_style=&#8221;solid&#8221;][image_with_animation image_url=&#8221;17091&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<h3 id=\"ember72\" class=\"ember-view reader-text-block__heading-3\">Step 3: Representing Text in a Vector Space<\/h3>\n<p id=\"ember73\" class=\"ember-view reader-text-block__paragraph\">Convert the meaning of text chunks into numerical representations (vectors) for comparison and retrieval.<\/p>\n<p id=\"ember74\" class=\"ember-view reader-text-block__paragraph\"><strong>Transformers Used<\/strong>:<\/p>\n<ul>\n<li><strong>HTTPCaller<\/strong>: Connects to Ollama\u2019s API to generate vector representations.<\/li>\n<\/ul>\n<p id=\"ember76\" class=\"ember-view reader-text-block__paragraph\">In this step, each text chunk is transformed into a <strong>vector representation<\/strong>, which captures its semantic meaning. 
These vectors allow the system to compare the text chunks based on their content, enabling similarity-based searches later.<\/p>\n<p id=\"ember77\" class=\"ember-view reader-text-block__paragraph\"><strong>Why Use Ollama?<\/strong><\/p>\n<ul>\n<li>Local AI models ensure data privacy (here nomic-embed-text).<\/li>\n<li>REST API makes integration simple and flexible, as there is a dedicated end point for embedding.<\/li>\n<\/ul>\n<div class=\"reader-image-block reader-image-block--full-width\"><\/div>\n[\/vc_column_text][image_with_animation image_url=&#8221;17213&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<p style=\"text-align: center;\"><em>HttpCaller parameters<\/em><\/p>\n[\/vc_column_text][image_with_animation image_url=&#8221;17212&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<p style=\"text-align: center;\"><em>Json output of the query, a vector of more than 700 dimensions representing the information location<\/em><\/p>\n[\/vc_column_text][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<h3 id=\"ember81\" class=\"ember-view reader-text-block__heading-3\">Step 
4: Storing and Searching for Relevant Text<\/h3>\n<p id=\"ember82\" class=\"ember-view reader-text-block__paragraph\">Save vector representations and quickly find the most relevant text for your question.<\/p>\n<ul>\n<li><strong>FeatureWriter<\/strong>: Saves vector data to a Parquet file for efficiency.<\/li>\n<\/ul>\n[\/vc_column_text][image_with_animation image_url=&#8221;17211&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<p style=\"text-align: center;\"><em>Stores text and vectors in temporary parquet file<\/em><\/p>\n<p style=\"text-align: left;\">Embed your question in another branch. 
To ask the closest vectors, you need the location of your question.<\/p>\n[\/vc_column_text][image_with_animation image_url=&#8221;17210&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<p style=\"text-align: center;\"><em>Question generation and call to embedding endpoint<\/em><\/p>\n<p style=\"text-align: left;\">Load to a database and query through SQLExecutor.<\/p>\n[\/vc_column_text][image_with_animation image_url=&#8221;17209&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<blockquote>\n<pre class=\"reader-text-block__code-block\">#Delimiter to send the requests to in memory duckdb in sequence\r\nFME_SQL_DELIMITER ;\r\n#Install  Vector Similarity Search Extension to be able to create an index and speed up search\r\nINSTALL vss;\r\nLOAD vss;\r\n#Create temp table from parquet. 
FLOAT[768] is an array of 768 dims (same as embedding from nomic)\r\nCREATE TABLE embeddings AS SELECT id, text, json_extract(content, '$')::FLOAT[768] AS embedding FROM read_parquet('embed.parquet');\r\n#Index creation, metric is specified to cosine but this is the default one) \r\nCREATE INDEX idx_embeddings_vss ON embeddings USING HNSW(embedding) WITH (metric = 'cosine');\r\n#Select the text from the 5 top answers (meaning closest in vector space)\r\nSELECT text FROM embeddings ORDER BY array_distance(embedding,@Value(_response_body)::FLOAT[768]) LIMIT 5;<\/pre>\n<\/blockquote>\n[\/vc_column_text][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<p id=\"ember89\" class=\"ember-view reader-text-block__paragraph\">DuckDB allows you to query these vectors to find the ones most relevant to your question. If you prefer to use a different database, FME makes switching seamless.<\/p>\n<p id=\"ember90\" class=\"ember-view reader-text-block__paragraph\"><strong>Why Use FME?<\/strong>: FME\u2019s no-code database connectors let you adapt the workflow for any database without technical hurdles.<\/p>\n[\/vc_column_text][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<h3 id=\"ember92\" class=\"ember-view reader-text-block__heading-3\">Step 5: Asking the AI Model Your Question<\/h3>\n<p id=\"ember93\" class=\"ember-view reader-text-block__paragraph\"><strong>Use Case<\/strong>: Combine the retrieved text with your question and send it to the AI model for a response.<\/p>\n<p id=\"ember94\" class=\"ember-view reader-text-block__paragraph\"><strong>Transformers Used<\/strong>:<\/p>\n<ul>\n<li><strong>AttributeCreator<\/strong>: Combines the question and retrieved text. 
Prepares the JSON body of your POST request.<\/li>\n<li><strong>StringCleaner <\/strong>: Helps to remove forbidden characters from the text to be sent as JSON.<\/li>\n<li><strong>HTTPCaller<\/strong>: Sends the combined data to Ollama for response generation. Please beware of context size and timeout.<\/li>\n<\/ul>\n<p id=\"ember96\" class=\"ember-view reader-text-block__paragraph\">The AI model uses the retrieved text chunks as context to generate an informed and relevant response to your question.<\/p>\n[\/vc_column_text][image_with_animation image_url=&#8221;17208&#8243; image_size=&#8221;full&#8221; animation_type=&#8221;entrance&#8221; animation=&#8221;None&#8221; animation_movement_type=&#8221;transform_y&#8221; hover_animation=&#8221;none&#8221; alignment=&#8221;center&#8221; border_radius=&#8221;none&#8221; box_shadow=&#8221;none&#8221; image_loading=&#8221;default&#8221; max_width=&#8221;100%&#8221; max_width_mobile=&#8221;default&#8221;][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<blockquote>\n<pre class=\"reader-text-block__code-block\">{\r\n  \"model\": \"@Value(model)\",\r\n  \"prompt\": \" Please use the following article context : @Value(text). 
Then, based on it and your knowledge, answer : @Value(prompt)\",\r\n  \"stream\": false,\r\n  \"options\": {\r\n    \"num_ctx\": 4000\r\n  }\r\n}<\/pre>\n<\/blockquote>\n[\/vc_column_text][vc_column_text css=&#8221;&#8221; text_direction=&#8221;default&#8221;]\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<h3 id=\"ember99\" class=\"ember-view reader-text-block__heading-3\">Step 6: Presenting the Results<\/h3>\n<p id=\"ember100\" class=\"ember-view reader-text-block__paragraph\">Format and deliver the AI\u2019s response in your preferred format.<\/p>\n<p id=\"ember101\" class=\"ember-view reader-text-block__paragraph\"><strong>Transformers Used<\/strong>:<\/p>\n<ul>\n<li><strong>JsonFragmenter<\/strong>: Extracts the response field from the AI\u2019s JSON output.<\/li>\n<li><strong>FeatureWriter<\/strong>: Outputs the results to a file or database.<\/li>\n<\/ul>\n<p id=\"ember103\" class=\"ember-view reader-text-block__paragraph\">With FME, you can output the results as JSON, CSV, or even push them directly into dashboards or other downstream systems.<\/p>\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<h3 id=\"ember104\" class=\"ember-view reader-text-block__heading-3\">Why Use This Workflow?<\/h3>\n<p id=\"ember105\" class=\"ember-view reader-text-block__paragraph\"><strong>FME\u2019s Strengths<\/strong>:<\/p>\n<ol>\n<li><strong>Connect with No Code<\/strong>: Easily work with PDFs, databases, or APIs without writing code.<\/li>\n<li><strong>Flexible Database Options<\/strong>: Switch databases (e.g., DuckDB to PostgreSQL) with minimal effort.<\/li>\n<li><strong>Seamless API Integration<\/strong>: Effortlessly connect to APIs like Ollama\u2019s REST API for AI-powered processing.<\/li>\n<\/ol>\n<p id=\"ember107\" class=\"ember-view reader-text-block__paragraph\"><strong>Ollama\u2019s Benefits<\/strong>:<\/p>\n<ul>\n<li><strong>Local AI Models<\/strong>: Ensures data privacy and control.<\/li>\n<li><strong>REST API<\/strong>: Simplifies 
vector generation and querying.<\/li>\n<\/ul>\n<p id=\"ember109\" class=\"ember-view reader-text-block__paragraph\"><strong>DuckDB Advantages<\/strong>:<\/p>\n<ul>\n<li><strong>In-Memory Performance<\/strong>: Fast processing and query execution.<\/li>\n<li><strong>SQL Compatibility<\/strong>: Familiar syntax for easy integration.<\/li>\n<li><strong>No need for installation<\/strong>: As we plan to publish this on FMEHub, this is lighter than installing Postgres, for example.<\/li>\n<\/ul>\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<h3 id=\"ember111\" class=\"ember-view reader-text-block__heading-3\">Next Steps: Improving the Workflow<\/h3>\n<p id=\"ember112\" class=\"ember-view reader-text-block__paragraph\">Here are a few ways to build on this foundation:<\/p>\n<ol>\n<li><strong>Expand Data Sources<\/strong>: Integrate real-time APIs, cloud storage, raster data or web scraping. With FME any data is reachable.<\/li>\n<li><strong>Enhance Automation<\/strong>: Deploy the workflow on FME Server for scheduling and scalability.<\/li>\n<li><strong>Try Advanced Options<\/strong>: Use more specific models or parameters like temperature to see how it influences the answer.<\/li>\n<li><strong>Interactive Outputs<\/strong>: Push the results to dashboards or notifications for actionable insights.<\/li>\n<\/ol>\n<hr class=\"reader-divider-block__horizontal-rule\" \/>\n<p id=\"ember114\" class=\"ember-view reader-text-block__paragraph\">This workflow showcases how FME can simplify RAG workflows by combining your documents with local AI models like Ollama. With minimal effort, you can build flexible, powerful solutions tailored to your needs. 
If you&#8217;re curious about taking this approach further, feel free to reach out for more insights!<\/p>\n[\/vc_column_text][\/vc_column][\/vc_row]","protected":false},"excerpt":{"rendered":"<p>[vc_row type=&#8221;in_container&#8221; full_screen_row_position=&#8221;middle&#8221; column_margin=&#8221;default&#8221; column_direction=&#8221;default&#8221; column_direction_tablet=&#8221;default&#8221; column_direction_phone=&#8221;default&#8221; scene_position=&#8221;center&#8221; text_color=&#8221;dark&#8221; text_align=&#8221;left&#8221; row_border_radius=&#8221;none&#8221; row_border_radius_applies=&#8221;bg&#8221; overflow=&#8221;visible&#8221; overlay_strength=&#8221;0.3&#8243; gradient_direction=&#8221;left_to_right&#8221; shape_divider_position=&#8221;bottom&#8221; bg_image_animation=&#8221;none&#8221;][vc_column column_padding=&#8221;no-extra-padding&#8221; column_padding_tablet=&#8221;inherit&#8221; column_padding_phone=&#8221;inherit&#8221; column_padding_position=&#8221;all&#8221; column_element_direction_desktop=&#8221;default&#8221; column_element_spacing=&#8221;default&#8221; desktop_text_alignment=&#8221;default&#8221; tablet_text_alignment=&#8221;default&#8221; phone_text_alignment=&#8221;default&#8221; background_color_opacity=&#8221;1&#8243; background_hover_color_opacity=&#8221;1&#8243; column_backdrop_filter=&#8221;none&#8221; column_shadow=&#8221;none&#8221;&#8230;<\/p>\n","protected":false},"author":6,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"footnotes":""},"class_list":["post-17132","page","type-page","status-publish"],"yoast_head":"<!-- This site is optimized with the Yoast SEO plugin v27.1.1 - https:\/\/yoast.com\/product\/yoast-seo-wordpress\/ -->\n<title>LLM RAG - Spatialworld Oy<\/title>\n<meta name=\"robots\" content=\"index, follow, max-snippet:-1, max-image-preview:large, max-video-preview:-1\" \/>\n<link rel=\"canonical\" 
href=\"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/\" \/>\n<meta property=\"og:locale\" content=\"en_US\" \/>\n<meta property=\"og:type\" content=\"article\" \/>\n<meta property=\"og:title\" content=\"LLM RAG - Spatialworld Oy\" \/>\n<meta property=\"og:description\" content=\"[vc_row type=&#8221;in_container&#8221; full_screen_row_position=&#8221;middle&#8221; column_margin=&#8221;default&#8221; column_direction=&#8221;default&#8221; column_direction_tablet=&#8221;default&#8221; column_direction_phone=&#8221;default&#8221; scene_position=&#8221;center&#8221; text_color=&#8221;dark&#8221; text_align=&#8221;left&#8221; row_border_radius=&#8221;none&#8221; row_border_radius_applies=&#8221;bg&#8221; overflow=&#8221;visible&#8221; overlay_strength=&#8221;0.3&#8243; gradient_direction=&#8221;left_to_right&#8221; shape_divider_position=&#8221;bottom&#8221; bg_image_animation=&#8221;none&#8221;][vc_column column_padding=&#8221;no-extra-padding&#8221; column_padding_tablet=&#8221;inherit&#8221; column_padding_phone=&#8221;inherit&#8221; column_padding_position=&#8221;all&#8221; column_element_direction_desktop=&#8221;default&#8221; column_element_spacing=&#8221;default&#8221; desktop_text_alignment=&#8221;default&#8221; tablet_text_alignment=&#8221;default&#8221; phone_text_alignment=&#8221;default&#8221; background_color_opacity=&#8221;1&#8243; background_hover_color_opacity=&#8221;1&#8243; column_backdrop_filter=&#8221;none&#8221; column_shadow=&#8221;none&#8221;...\" \/>\n<meta property=\"og:url\" content=\"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/\" \/>\n<meta property=\"og:site_name\" content=\"Spatialworld Oy\" \/>\n<meta property=\"article:modified_time\" content=\"2025-02-05T15:35:10+00:00\" \/>\n<meta name=\"twitter:label1\" content=\"Est. 
reading time\" \/>\n\t<meta name=\"twitter:data1\" content=\"9 minutes\" \/>\n<script type=\"application\/ld+json\" class=\"yoast-schema-graph\">{\"@context\":\"https:\/\/schema.org\",\"@graph\":[{\"@type\":\"WebPage\",\"@id\":\"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/\",\"url\":\"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/\",\"name\":\"LLM RAG - Spatialworld Oy\",\"isPartOf\":{\"@id\":\"https:\/\/spatialworld.fi\/en\/#website\"},\"datePublished\":\"2025-01-29T12:01:30+00:00\",\"dateModified\":\"2025-02-05T15:35:10+00:00\",\"breadcrumb\":{\"@id\":\"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/#breadcrumb\"},\"inLanguage\":\"en-US\",\"potentialAction\":[{\"@type\":\"ReadAction\",\"target\":[[\"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/\"]]}]},{\"@type\":\"BreadcrumbList\",\"@id\":\"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/#breadcrumb\",\"itemListElement\":[{\"@type\":\"ListItem\",\"position\":1,\"name\":\"Home\",\"item\":\"https:\/\/spatialworld.fi\/en\/\"},{\"@type\":\"ListItem\",\"position\":2,\"name\":\"LLM RAG\"}]},{\"@type\":\"WebSite\",\"@id\":\"https:\/\/spatialworld.fi\/en\/#website\",\"url\":\"https:\/\/spatialworld.fi\/en\/\",\"name\":\"Spatialworld Oy\",\"description\":\"\",\"potentialAction\":[{\"@type\":\"SearchAction\",\"target\":{\"@type\":\"EntryPoint\",\"urlTemplate\":\"https:\/\/spatialworld.fi\/en\/?s={search_term_string}\"},\"query-input\":{\"@type\":\"PropertyValueSpecification\",\"valueRequired\":true,\"valueName\":\"search_term_string\"}}],\"inLanguage\":\"en-US\"}]}<\/script>\n<!-- \/ Yoast SEO plugin. 
-->","yoast_head_json":{"title":"LLM RAG - Spatialworld Oy","robots":{"index":"index","follow":"follow","max-snippet":"max-snippet:-1","max-image-preview":"max-image-preview:large","max-video-preview":"max-video-preview:-1"},"canonical":"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/","og_locale":"en_US","og_type":"article","og_title":"LLM RAG - Spatialworld Oy","og_description":"[vc_row type=&#8221;in_container&#8221; full_screen_row_position=&#8221;middle&#8221; column_margin=&#8221;default&#8221; column_direction=&#8221;default&#8221; column_direction_tablet=&#8221;default&#8221; column_direction_phone=&#8221;default&#8221; scene_position=&#8221;center&#8221; text_color=&#8221;dark&#8221; text_align=&#8221;left&#8221; row_border_radius=&#8221;none&#8221; row_border_radius_applies=&#8221;bg&#8221; overflow=&#8221;visible&#8221; overlay_strength=&#8221;0.3&#8243; gradient_direction=&#8221;left_to_right&#8221; shape_divider_position=&#8221;bottom&#8221; bg_image_animation=&#8221;none&#8221;][vc_column column_padding=&#8221;no-extra-padding&#8221; column_padding_tablet=&#8221;inherit&#8221; column_padding_phone=&#8221;inherit&#8221; column_padding_position=&#8221;all&#8221; column_element_direction_desktop=&#8221;default&#8221; column_element_spacing=&#8221;default&#8221; desktop_text_alignment=&#8221;default&#8221; tablet_text_alignment=&#8221;default&#8221; phone_text_alignment=&#8221;default&#8221; background_color_opacity=&#8221;1&#8243; background_hover_color_opacity=&#8221;1&#8243; column_backdrop_filter=&#8221;none&#8221; column_shadow=&#8221;none&#8221;...","og_url":"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/","og_site_name":"Spatialworld Oy","article_modified_time":"2025-02-05T15:35:10+00:00","twitter_misc":{"Est. 
reading time":"9 minutes"},"schema":{"@context":"https:\/\/schema.org","@graph":[{"@type":"WebPage","@id":"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/","url":"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/","name":"LLM RAG - Spatialworld Oy","isPartOf":{"@id":"https:\/\/spatialworld.fi\/en\/#website"},"datePublished":"2025-01-29T12:01:30+00:00","dateModified":"2025-02-05T15:35:10+00:00","breadcrumb":{"@id":"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/#breadcrumb"},"inLanguage":"en-US","potentialAction":[{"@type":"ReadAction","target":[["https:\/\/spatialworld.fi\/en\/17076-2-2-3\/"]]}]},{"@type":"BreadcrumbList","@id":"https:\/\/spatialworld.fi\/en\/17076-2-2-3\/#breadcrumb","itemListElement":[{"@type":"ListItem","position":1,"name":"Home","item":"https:\/\/spatialworld.fi\/en\/"},{"@type":"ListItem","position":2,"name":"LLM RAG"}]},{"@type":"WebSite","@id":"https:\/\/spatialworld.fi\/en\/#website","url":"https:\/\/spatialworld.fi\/en\/","name":"Spatialworld Oy","description":"","potentialAction":[{"@type":"SearchAction","target":{"@type":"EntryPoint","urlTemplate":"https:\/\/spatialworld.fi\/en\/?s={search_term_string}"},"query-input":{"@type":"PropertyValueSpecification","valueRequired":true,"valueName":"search_term_string"}}],"inLanguage":"en-US"}]}},"_links":{"self":[{"href":"https:\/\/spatialworld.fi\/en\/wp-json\/wp\/v2\/pages\/17132","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/spatialworld.fi\/en\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/spatialworld.fi\/en\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/spatialworld.fi\/en\/wp-json\/wp\/v2\/users\/6"}],"replies":[{"embeddable":true,"href":"https:\/\/spatialworld.fi\/en\/wp-json\/wp\/v2\/comments?post=17132"}],"version-history":[{"count":8,"href":"https:\/\/spatialworld.fi\/en\/wp-json\/wp\/v2\/pages\/17132\/revisions"}],"predecessor-version":[{"id":17310,"href":"https:\/\/spatialworld.fi\/en\/wp-json\/wp\/v2\/pages\/17132\/revisions\/17310"}]
,"wp:attachment":[{"href":"https:\/\/spatialworld.fi\/en\/wp-json\/wp\/v2\/media?parent=17132"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}