What does IO mean in Java?
IO stands for input and output: writing data to and reading data from files (and other sources such as network sockets).
Reading a file in Java (the traditional stream-based approach; NIO later provided an alternative):
Use BufferedReader and FileReader to read the contents of a file:
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class ReadFileExample {
    public static void main(String[] args) {
        try (BufferedReader br = new BufferedReader(new FileReader("example.txt"))) {
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Writing a file in Java:
Use BufferedWriter and FileWriter to write content to a file:
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class WriteFileExample {
    public static void main(String[] args) {
        try (BufferedWriter bw = new BufferedWriter(new FileWriter("output.txt"))) {
            bw.write("Hello, world!");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
Multithreading in Java --- extends Thread
public class MyThread extends Thread {
    @Override
    public void run() {
        System.out.println("Thread is running: " + Thread.currentThread().getName());
    }

    public static void main(String[] args) {
        MyThread thread1 = new MyThread();
        MyThread thread2 = new MyThread();

        thread1.start(); // start the first thread
        thread2.start(); // start the second thread
    }
}
Multithreading in Java --- new Thread + implements Runnable
public class MyRunnable implements Runnable {
    @Override
    public void run() {
        System.out.println("Runnable is running: " + Thread.currentThread().getName());
    }

    public static void main(String[] args) {
        Thread thread1 = new Thread(new MyRunnable());
        Thread thread2 = new Thread(new MyRunnable());

        thread1.start(); // start the first thread
        thread2.start(); // start the second thread
    }
}
The Java collections framework (lists, sets, queues, and key-value maps):
Collection
├── List
│   ├── ArrayList
│   ├── LinkedList
│   ├── Vector
│   └── Stack
├── Set
│   ├── HashSet
│   ├── LinkedHashSet
│   └── TreeSet
└── Queue
    ├── LinkedList
    └── PriorityQueue

Map
├── HashMap
├── LinkedHashMap
├── TreeMap
├── Hashtable
└── ConcurrentHashMap

Deque
├── ArrayDeque
└── LinkedList
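As a quick illustration of the three families above, here is a minimal usage sketch (the class and variable names are just examples):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class CollectionsExample {
    public static void main(String[] args) {
        // List: ordered, allows duplicates
        List<String> names = new ArrayList<>();
        names.add("Alice");
        names.add("Alice"); // duplicate is kept

        // Set: no duplicates
        Set<String> uniqueNames = new HashSet<>(names);

        // Map: key-value pairs
        Map<String, Integer> ages = new HashMap<>();
        ages.put("Alice", 30);

        System.out.println(names);             // [Alice, Alice]
        System.out.println(uniqueNames);       // [Alice]
        System.out.println(ages.get("Alice")); // 30
    }
}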
Does the JVM compile code?
The JVM does not compile Java source code; the javac compiler in the JDK compiles source files into bytecode (.class files).
The JVM loads and executes that bytecode (its JIT compiler may further compile hot bytecode into native machine code at run time).
Runtime data areas in the JVM (where different kinds of variables live):
Method area ---- static variables, constants
Heap ---- instance variables
Stack ---- local variables
public class TestClass {
    private static int staticVar = 0; // static variable, stored in the method area
    private int instanceVar;          // instance variable, stored on the heap

    public static void main(String[] args) {
        TestClass obj1 = new TestClass();
        TestClass obj2 = new TestClass();
        obj1.instanceVar = 1;
        obj2.instanceVar = 2;
        TestClass.staticVar = 3;
        System.out.println(obj1.instanceVar);    // prints 1
        System.out.println(obj2.instanceVar);    // prints 2
        System.out.println(TestClass.staticVar); // prints 3
    }
}

Local variables ---- stack:
public class StackExample {
    public static void main(String[] args) {
        int x = 5;                 // local variable, stored on the stack
        int result = factorial(x); // method call: a stack frame for factorial is created
        System.out.println("Factorial of " + x + " is " + result);
    }

    public static int factorial(int n) {
        if (n == 1) {
            return 1;                    // base case: the stack frames start to unwind
        } else {
            return n * factorial(n - 1); // recursive call: a new stack frame is created
        }
    }
}
The Java NIO framework
Java NIO (New Input/Output) is the newer I/O library introduced in JDK 1.4.
It provides a channel- and buffer-based alternative to the traditional stream-based I/O.
Reading a file with Java NIO:
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class NIOExample {
    public static void main(String[] args) {
        // Path of the file to read
        Path path = Paths.get("example.txt");

        // try-with-resources ensures the FileChannel is closed automatically
        try (FileChannel fileChannel = FileChannel.open(path, StandardOpenOption.READ)) {

            // Allocate a 1024-byte buffer
            ByteBuffer buffer = ByteBuffer.allocate(1024);

            // Read data from the channel into the buffer
            int bytesRead = fileChannel.read(buffer);

            // read returns -1 when there are no more bytes to read
            while (bytesRead != -1) {
                // Flip the buffer into read mode
                buffer.flip();

                // Drain the buffer until no bytes remain
                while (buffer.hasRemaining()) {
                    // Read one byte at a time and print it as a character
                    System.out.print((char) buffer.get());
                }

                // Clear the buffer to prepare for the next read
                buffer.clear();

                // Continue reading from the channel into the buffer
                bytesRead = fileChannel.read(buffer);
            }

        } catch (IOException e) {
            // Catch and handle any I/O exception
            e.printStackTrace();
        }
    }
}
How should Netty be understood?
Netty is a framework built on top of Java NIO.
With Netty you can run your own server (for example on port 8080) without a servlet container such as Tomcat.
Creating a server with Netty:
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;

public class EchoServer {

    private final int port;

    // Constructor: set the server port
    public EchoServer(int port) {
        this.port = port;
    }

    // Start the server
    public void start() throws InterruptedException {
        // Event loop group that accepts incoming connections
        EventLoopGroup bossGroup = new NioEventLoopGroup();
        // Event loop group that handles client I/O
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try {
            // Create and configure the ServerBootstrap
            ServerBootstrap bootstrap = new ServerBootstrap();
            bootstrap.group(bossGroup, workerGroup)
                    .channel(NioServerSocketChannel.class) // use the NIO transport channel
                    .childHandler(new ChannelInitializer<SocketChannel>() { // configure child channels
                        @Override
                        protected void initChannel(SocketChannel socketChannel) {
                            // Get the channel pipeline
                            ChannelPipeline pipeline = socketChannel.pipeline();
                            // Add a String decoder
                            pipeline.addLast(new StringDecoder());
                            // Add a String encoder
                            pipeline.addLast(new StringEncoder());
                            // Add the custom business-logic handler
                            pipeline.addLast(new EchoServerHandler());
                        }
                    });

            // Bind the port and start the server
            ChannelFuture future = bootstrap.bind(port).sync();
            // Wait until the server socket is closed
            future.channel().closeFuture().sync();
        } finally {
            // Shut down the event loop groups gracefully
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        // Create and start the echo server
        new EchoServer(8080).start();
    }
}

// Custom business-logic handler (a separate source file)
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

public class EchoServerHandler extends SimpleChannelInboundHandler<String> {

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, String msg) {
        // Print the received message
        System.out.println("Received: " + msg);
        // Write the received message back to the client
        ctx.writeAndFlush(msg);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        // Print the exception stack trace
        cause.printStackTrace();
        // Close the channel
        ctx.close();
    }
}
Which web development frameworks does Java have?
Spring Boot
Spring MVC
Is Netty a web development framework?
No. Netty is a network communication framework.
How do a network communication framework and a web development framework differ, and when is each used?
A network communication framework handles raw data transfer over transport protocols such as TCP and UDP.
A web development framework is built around HTTP and is used to serve web pages and REST APIs.
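For contrast, a minimal HTTP endpoint with Spring Boot might look like the sketch below (a hedged example: it assumes the spring-boot-starter-web dependency, and the class name and path are invented for illustration). The Netty TCP example follows after it.

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@SpringBootApplication
@RestController
public class HelloWebApplication {

    public static void main(String[] args) {
        // Starts an embedded web server (Tomcat by default) on port 8080
        SpringApplication.run(HelloWebApplication.class, args);
    }

    // Handles HTTP GET requests to /hello
    @GetMapping("/hello")
    public String hello() {
        return "Hello over HTTP";
    }
}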
A Netty network-communication example: sending JSON data over TCP:
Server:
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.json.JsonObjectDecoder;
import io.netty.handler.codec.string.StringEncoder;

public class JsonServer {

    private final int port;

    public JsonServer(int port) {
        this.port = port;
    }

    public void start() throws InterruptedException {
        // Two event loop groups: one accepts connections, one handles data transfer
        EventLoopGroup bossGroup = new NioEventLoopGroup();
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try {
            ServerBootstrap b = new ServerBootstrap();
            b.group(bossGroup, workerGroup)
                    // Use the NIO ServerSocketChannel to accept incoming connections
                    .channel(NioServerSocketChannel.class)
                    .childHandler(new ChannelInitializer<SocketChannel>() {
                        @Override
                        public void initChannel(SocketChannel ch) {
                            ChannelPipeline p = ch.pipeline();
                            // JSON decoder: splits the incoming byte stream into complete JSON frames
                            p.addLast(new JsonObjectDecoder());
                            // String encoder for data sent back to the client
                            p.addLast(new StringEncoder());
                            // Custom handler that processes the received JSON
                            p.addLast(new JsonServerHandler());
                        }
                    });

            // Bind the port and start the server
            ChannelFuture f = b.bind(port).sync();
            f.channel().closeFuture().sync();
        } finally {
            // Shut down the event loop groups gracefully
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        new JsonServer(8080).start();
    }
}

Client:
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;

public class JsonClient {

    private final String host;
    private final int port;

    public JsonClient(String host, int port) {
        this.host = host;
        this.port = port;
    }

    public void start() throws InterruptedException {
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            Bootstrap b = new Bootstrap();
            b.group(group)
                    // Use the NIO SocketChannel for the outgoing connection
                    .channel(NioSocketChannel.class)
                    .handler(new ChannelInitializer<SocketChannel>() {
                        @Override
                        public void initChannel(SocketChannel ch) {
                            ChannelPipeline p = ch.pipeline();
                            // String encoder: outgoing JSON is written as a plain string
                            // (Netty's json codec package only provides JsonObjectDecoder)
                            p.addLast(new StringEncoder());
                            // String decoder: converts the server's reply into a String
                            p.addLast(new StringDecoder());
                            // Custom handler that sends JSON to the server
                            p.addLast(new JsonClientHandler());
                        }
                    });

            // Connect to the server and start the client
            ChannelFuture f = b.connect(host, port).sync();
            f.channel().closeFuture().sync();
        } finally {
            // Shut down the event loop group gracefully
            group.shutdownGracefully();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        new JsonClient("localhost", 8080).start();
    }
}
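The JsonServerHandler and JsonClientHandler referenced in the pipelines above are not shown in the original; the following is a minimal sketch of what they might look like (the JSON payload and reply are invented for illustration). Note that JsonObjectDecoder emits ByteBuf frames, so the server-side handler works with ByteBuf.

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.util.CharsetUtil;

// Server-side handler: JsonObjectDecoder delivers each JSON document as a ByteBuf frame
public class JsonServerHandler extends SimpleChannelInboundHandler<ByteBuf> {
    @Override
    protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) {
        String json = msg.toString(CharsetUtil.UTF_8);
        System.out.println("Received JSON: " + json);
        // The StringEncoder in the pipeline turns this reply into bytes
        ctx.writeAndFlush("{\"status\":\"ok\"}");
    }
}

// Client-side handler (a separate source file): sends JSON once the connection is established
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

public class JsonClientHandler extends SimpleChannelInboundHandler<String> {
    @Override
    public void channelActive(ChannelHandlerContext ctx) {
        // Example payload; a real application would serialize an object to JSON here
        ctx.writeAndFlush("{\"user\":\"demo\",\"message\":\"hello\"}");
    }

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, String msg) {
        System.out.println("Server replied: " + msg);
    }
}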
How do you use ES (Elasticsearch)?
Download Elasticsearch, open a command prompt in its bin directory, and run elasticsearch.bat (on Windows).
How do you store data in ES?
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;

public class ElasticsearchExample {
    public static void main(String[] args) {
        // Create the Elasticsearch client
        RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")));

        // Prepare the JSON document to index
        String jsonString = "{" +
                "\"user\":\"kimchy\"," +
                "\"postDate\":\"2024-06-10\"," +
                "\"message\":\"trying out Elasticsearch\"" +
                "}";

        // Create an index request that stores the document in the "posts" index with ID 1
        IndexRequest indexRequest = new IndexRequest("posts")
                .id("1") // the ID can be omitted and Elasticsearch will generate one
                .source(jsonString, XContentType.JSON);

        try {
            // Execute the index request
            IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
            System.out.println("Indexed document with id: " + indexResponse.getId());
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                // Close the client
                client.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
A simple example of indexing and querying data with Elasticsearch:
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;

public class ElasticsearchExample {
    public static void main(String[] args) {
        // Create the Elasticsearch client
        RestHighLevelClient client = new RestHighLevelClient(RestClient.builder(
                new HttpHost("localhost", 9200, "http")));

        // Index a document
        IndexRequest indexRequest = new IndexRequest("my_index")
                .id("1")
                .source("{\"field\":\"value\"}", XContentType.JSON);

        try {
            IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT);
            System.out.println("Indexed document with id: " + indexResponse.getId());

            // Query for the document
            SearchRequest searchRequest = new SearchRequest("my_index");
            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
            searchSourceBuilder.query(QueryBuilders.matchQuery("field", "value"));
            searchRequest.source(searchSourceBuilder);

            SearchResponse searchResponse = client.search(searchRequest, RequestOptions.DEFAULT);
            System.out.println("Search results: " + searchResponse.getHits().getHits().length);

        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Close the client
            try {
                client.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
Connecting to MongoDB from Java:
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import org.bson.Document;
import org.bson.types.ObjectId;

public class MongoDBExample {

    public static void main(String[] args) {
        // Create a MongoDB client and connect to the MongoDB server
        MongoClient mongoClient = MongoClients.create("mongodb://localhost:27017");

        // Connect to the database; it is created automatically if it does not exist
        MongoDatabase database = mongoClient.getDatabase("testdb");

        // Get the collection; it is created automatically if it does not exist
        MongoCollection<Document> collection = database.getCollection("testCollection");

        // Create a document
        Document doc = new Document("_id", new ObjectId())
                .append("name", "John Doe")
                .append("age", 29)
                .append("address", new Document("street", "123 Main St")
                        .append("city", "Anytown")
                        .append("state", "CA")
                        .append("zip", "12345"));

        // Insert the document into the collection
        collection.insertOne(doc);
        System.out.println("Document inserted successfully");

        // Read documents
        FindIterable<Document> documents = collection.find();
        for (Document document : documents) {
            System.out.println("Retrieved document: " + document.toJson());
        }

        // Update a document
        Document query = new Document("name", "John Doe");
        Document update = new Document("$set", new Document("age", 30));
        collection.updateOne(query, update);
        System.out.println("Document updated successfully");

        // Delete a document
        collection.deleteOne(query);
        System.out.println("Document deleted successfully");

        // Close the MongoDB client
        mongoClient.close();
    }
}
What data types does MongoDB support?
Strings, numbers, arrays, embedded (JSON/BSON-style) documents, dates, ObjectId, and so on.
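A small sketch of how these types appear in a Java Document (the field names and values are invented for illustration; it reuses the driver classes from the example above):

import java.util.Arrays;
import java.util.Date;

import org.bson.Document;
import org.bson.types.ObjectId;

public class MongoTypesExample {
    public static void main(String[] args) {
        Document doc = new Document("_id", new ObjectId())       // ObjectId
                .append("title", "Hello MongoDB")                // string
                .append("views", 42)                             // number
                .append("tags", Arrays.asList("java", "nosql"))  // array
                .append("createdAt", new Date())                 // date
                .append("author", new Document("name", "John")   // embedded document
                        .append("active", true));                // boolean
        System.out.println(doc.toJson());
    }
}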
Introduce Hadoop:
Hadoop is an open-source distributed framework for storing and processing large data sets. Its main components are HDFS (the Hadoop Distributed File System) and MapReduce.
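To make the MapReduce part concrete, here is a minimal sketch of the classic word-count job (it assumes the hadoop-mapreduce-client dependencies; the input and output paths are taken from the command-line arguments):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCount {

    // Mapper: emits (word, 1) for every word in the input line
    public static class TokenizerMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    word.set(token);
                    context.write(word, ONE);
                }
            }
        }
    }

    // Reducer: sums the counts for each word
    public static class SumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "word count");
        job.setJarByClass(WordCount.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(SumReducer.class);
        job.setReducerClass(SumReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));   // HDFS input directory
        FileOutputFormat.setOutputPath(job, new Path(args[1])); // HDFS output directory
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}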
Introduce Docker:
Docker works like a virtual container.
Docker is an open-source platform for developing, shipping, and running applications. Through containerization it provides a lightweight form of virtualization.
Redis cluster:
Server side:
redis-server redis-7000.conf
redis-server redis-7001.conf
redis-server redis-7002.conf
redis-server redis-7003.conf
redis-server redis-7004.conf
redis-server redis-7005.conf
Client side: redis-cli --cluster create 127.0.0.1:7000 127.0.0.1:7001 127.0.0.1:7002 127.0.0.1:7003 127.0.0.1:7004 127.0.0.1:7005 --cluster-replicas 1
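Once the cluster is running, a Java application can talk to it with a cluster-aware client. The following is a minimal sketch assuming the Jedis client library (the seed nodes and key are just examples):

import java.util.HashSet;
import java.util.Set;

import redis.clients.jedis.HostAndPort;
import redis.clients.jedis.JedisCluster;

public class RedisClusterExample {
    public static void main(String[] args) throws Exception {
        // Seed nodes: the client discovers the rest of the cluster from these
        Set<HostAndPort> nodes = new HashSet<>();
        nodes.add(new HostAndPort("127.0.0.1", 7000));
        nodes.add(new HostAndPort("127.0.0.1", 7001));

        // JedisCluster routes each command to the node that owns the key's hash slot
        try (JedisCluster cluster = new JedisCluster(nodes)) {
            cluster.set("greeting", "hello cluster");
            System.out.println(cluster.get("greeting"));
        }
    }
}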
Distributed transaction handling:
Spring Boot + Kafka to achieve message consistency
Producer (sends the message):
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.http.ResponseEntity;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@SpringBootApplication
public class ProducerApplication {

    public static void main(String[] args) {
        SpringApplication.run(ProducerApplication.class, args);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }
}

@RestController
public class MessageController {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @PostMapping("/send")
    public ResponseEntity<String> sendMessage(@RequestParam String message) {
        kafkaTemplate.send("myTopic", message);
        return ResponseEntity.ok("Message sent");
    }
}

Consumer (processes the message):
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.stereotype.Component;

@SpringBootApplication
public class ConsumerApplication {

    public static void main(String[] args) {
        SpringApplication.run(ConsumerApplication.class, args);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        configProps.put(ConsumerConfig.GROUP_ID_CONFIG, "group_id");
        configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(configProps);
    }
}

@Component
public class MessageListener {

    @KafkaListener(topics = "myTopic", groupId = "group_id")
    public void listen(String message) {
        System.out.println("Received message: " + message);
        // Handle the business logic here, e.g. insert into a database
    }
}
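The producer above sends plain (non-transactional) messages. If truly transactional sends are needed for the consistency scheme, spring-kafka supports them; the following is a hedged sketch of roughly what would change (the transaction-id prefix, topic name, and payloads are assumptions, and exactly-once delivery also depends on broker configuration):

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;

public class TransactionalSendExample {

    public static void main(String[] args) throws Exception {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true); // required for transactions

        DefaultKafkaProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(props);
        factory.setTransactionIdPrefix("tx-"); // turns on transactional producers

        KafkaTemplate<String, String> kafkaTemplate = new KafkaTemplate<>(factory);

        // Either both sends are committed together or neither is visible to read_committed consumers
        kafkaTemplate.executeInTransaction(ops -> {
            ops.send("myTopic", "order-created");
            ops.send("myTopic", "stock-reserved");
            return true;
        });

        factory.destroy(); // release the underlying producers
    }
}

On the consumer side, setting ConsumerConfig.ISOLATION_LEVEL_CONFIG to "read_committed" ensures that only committed records are delivered to the listener.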