赞
踩
- mkdir llama
- cd llama
- cd llama.cpp
- mkdir build
- cd build/
- cmake .. -DBUILD_SHARED_LIBS=ON
- cd ..
- cmake --build build --config Release -j --verbose
- using UnrealBuildTool;
- using System.IO;
-
- public class UELlama : ModuleRules
- {
- public UELlama(ReadOnlyTargetRules Target) : base(Target)
- {
- PCHUsage = ModuleRules.PCHUsageMode.UseExplicitOrSharedPCHs;
-
- PublicIncludePaths.AddRange(
- new string[] {
- // ... add public include paths required here ...
- }
- );
-
-
- PrivateIncludePaths.AddRange(
- new string[] {
- }
- );
-
-
- PublicDependencyModuleNames.AddRange(
- new string[]
- {
- "Core",
- // ... add other public dependencies that you statically link with here ...
- }
- );
-
-
- PrivateDependencyModuleNames.AddRange(
- new string[]
- {
- "CoreUObject",
- "Engine",
- "Slate",
- "SlateCore",
- // ... add private dependencies that you statically link with here ...
- }
- );
-
- if (Target.bBuildEditor)
- {
- PrivateDependencyModuleNames.AddRange(
- new string[]
- {
- "UnrealEd"
- }
- );
- }
-
- if (Target.Platform == UnrealTargetPlatform.Win64)
- {
- string PluginBinariesDir = Path.Combine(ModuleDirectory, "..", "..", "Binaries", "Win64");
- string ProjectBinariesDir = Path.Combine(ModuleDirectory, "..", "..", "..", "..", "Binaries", "Win64");
-
- string DLLFilePath = Path.Combine(ProjectBinariesDir, "llama.dll");
- string DestinationDLLPath = Path.Combine(PluginBinariesDir, "llama.dll");
-
- RuntimeDependencies.Add(DLLFilePath, DestinationDLLPath);
- }
-
- DynamicallyLoadedModuleNames.AddRange(
- new string[]
- {
- // ... add any modules that your module loads dynamically here ...
- }
- );
- if (Target.Platform == UnrealTargetPlatform.Linux)
- {
- PublicAdditionalLibraries.Add(Path.Combine(PluginDirectory, "Libraries", "libllama.so"));
- PublicIncludePaths.Add(Path.Combine(PluginDirectory, "Includes"));
- }
- else if (Target.Platform == UnrealTargetPlatform.Win64)
- {
- PublicAdditionalLibraries.Add(Path.Combine(PluginDirectory, "Libraries", "llama.lib"));
- PublicIncludePaths.Add(Path.Combine(PluginDirectory, "Includes"));
- }
-
- }
- }
A new line, the value “Human:”, and the value “AI:”. Our goal is to generate only a single line of text that corresponds to the current speaker.
F:\Projects\UE_Projects\5.1\UE5LLAMA\Content\Movies\Models\openchat_3.5.Q3_K_L.gguf
- best_of;
- The completion must not change the speaker.
- The completion must not allow the speaker to speak twice in a row.
添加函数Add Token:
事件图表:
-
- User:
- {prompt}
- GPT4:
后言:该项目实现了离线AI聊天功能,响应及时。但目前还有部分问题如:回答中文时部分文字呈现为?号,可能根据不同模型有不同的问题,可以自行测试该网站中的其他语言模型。
希望这篇文章能帮到你!!
Copyright © 2003-2013 www.wpsshop.cn 版权所有,并保留所有权利。