mirror of
https://github.com/array-in-a-matrix/brainwine.git
synced 2025-04-02 11:11:58 -04:00
Fixed indentation
This commit is contained in:
parent 15e2c22d86
commit 0a01219aae
2 changed files with 155 additions and 155 deletions
Block.java

@@ -22,7 +22,7 @@ public class Block {
    }
    
    public Block(int base, int back, int front) {
        this(base & 15, back & 65535, back >> 16 & 31, front & 65535, front >> 16 & 31, base >> 8 & 255, base >> 16 & 31);
    }
    
    public Block(int baseItem, int backItem, int backMod, int frontItem, int frontMod, int liquidItem, int liquidMod) {
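
The constructor in this hunk unpacks two packed integers into their item and mod components. The snippet below only illustrates that bit arithmetic; the class name and the sample values are made up for the example and are not part of the repository.

// Illustration only: how Block(int base, int back, int front) decomposes its
// packed arguments, using the same masks and shifts as the constructor above.
// The front value is unpacked the same way as back (item in the low 16 bits,
// mod in bits 16..20).
public class PackedBlockExample {
    public static void main(String[] args) {
        int base = 0x001F0A03; // made-up packed value
        int back = 0x00051234; // made-up packed value

        int baseItem   = base & 15;        // lowest 4 bits
        int liquidItem = base >> 8 & 255;  // bits 8..15
        int liquidMod  = base >> 16 & 31;  // bits 16..20
        int backItem   = back & 65535;     // lowest 16 bits
        int backMod    = back >> 16 & 31;  // bits 16..20

        System.out.printf("baseItem=%d liquidItem=%d liquidMod=%d backItem=%d backMod=%d%n",
                baseItem, liquidItem, liquidMod, backItem, backMod);
    }
}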

ChunkManager.java

@@ -19,158 +19,158 @@ import brainwine.gameserver.util.ZipUtils;

public class ChunkManager {
    
    private static final Logger logger = LogManager.getLogger();
    private static final String headerString = "brainwine blocks file";
    private static final int latestVersion = 1;
    private static final int dataOffset = headerString.length() + 4;
    private static final int allocSize = 2048;
    private static boolean conversionNotified;
    private final Zone zone;
    private RandomAccessFile file;
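    
    // Layout of blocks.dat implied by the constants above: a header (the UTF
    // header string followed by the version int) and then one fixed-size slot
    // of allocSize bytes per chunk. Chunk slot i is addressed at
    // dataOffset + i * allocSize; each slot starts with a 2-byte payload length
    // followed by the deflated chunk data.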
    
    public ChunkManager(Zone zone) {
        this.zone = zone;
        
        try {
            if(file == null) {
                File blocksFile = new File(zone.getDirectory(), "blocks.dat");
                File legacyBlocksFile = new File(zone.getDirectory(), "blocks");
                
                if(!blocksFile.exists()) {
                    blocksFile.createNewFile();
                }
                
                file = new RandomAccessFile(blocksFile, "rw");
                
                if(file.length() == 0) {
                    file.writeUTF(headerString);
                    file.writeInt(latestVersion);
                    
                    if(legacyBlocksFile.exists()) {
                        if(!conversionNotified) {
                            logger.info("One or more block data files need to be converted. This might take a while ...");
                            conversionNotified = true;
                        }
                        
                        convertLegacyBlocksFile(legacyBlocksFile);
                    }
                } else {
                    if(!file.readUTF().equals(headerString)) {
                        throw new IOException("Invalid header string");
                    }
                }
            }
        } catch(Exception e) {
            logger.error("ChunkManager construction for zone {} failed", zone.getDocumentId(), e);
        }
    }
    
    private void convertLegacyBlocksFile(File legacyBlocksFile) throws Exception {
        byte[] bytes = Files.readAllBytes(legacyBlocksFile.toPath());
        
        for(int i = 0; i < bytes.length; i += 2048) {
            short length = (short)(((bytes[i] & 0xFF) << 8) + (bytes[i + 1] & 0xFF));
            byte[] chunkBytes = ZipUtils.inflateBytes(Arrays.copyOfRange(bytes, i + 2, i + 2 + length));
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(baos);
            Unpacker unpacker = MessagePackHelper.createBufferUnpacker(chunkBytes);
            unpacker.readArrayBegin();
            int x = unpacker.readInt();
            int y = unpacker.readInt();
            int width = unpacker.readInt();
            int height = unpacker.readInt();
            dos.writeInt(x);
            dos.writeInt(y);
            dos.writeInt(width);
            dos.writeInt(height);
            unpacker.readArrayBegin();
            
            for(int j = 0; j < width * height; j++) {
                dos.writeInt(unpacker.readInt());
                dos.writeInt(unpacker.readInt());
                dos.writeInt(unpacker.readInt());
            }
            
            unpacker.close();
            byte[] updatedBytes = ZipUtils.deflateBytes(baos.toByteArray());
            dos.close();
            file.seek(dataOffset + zone.getChunkIndex(x, y) * allocSize);
            file.writeShort(updatedBytes.length);
            file.write(updatedBytes);
        }
    }
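    
    // The legacy "blocks" file read above also uses fixed 2048-byte slots, each
    // starting with a 2-byte big-endian length for a deflated MessagePack payload.
    // For example, made-up prefix bytes 0x01 0x2C decode to
    // ((0x01 & 0xFF) << 8) + (0x2C & 0xFF) = 300 bytes of payload.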
    
    public Chunk loadChunk(int index) {
        Chunk chunk = null;
        DataInputStream dis = null;
        
        try {
            file.seek(dataOffset + index * allocSize);
            byte[] bytes = new byte[file.readShort()];
            file.read(bytes);
            
            dis = new DataInputStream(new ByteArrayInputStream(ZipUtils.inflateBytes(bytes)));
            chunk = new Chunk(dis.readInt(), dis.readInt(), dis.readInt(), dis.readInt());
            
            for(int i = 0; i < zone.getChunkWidth() * zone.getChunkHeight(); i++) {
                chunk.setBlock(i, new Block(dis.readInt(), dis.readInt(), dis.readInt()));
            }
        } catch(Exception e) {
            logger.error("Could not load chunk {} of zone {}", index, zone.getDocumentId(), e);
        } finally {
            if(dis != null) {
                try {
                    dis.close();
                } catch (IOException e) {
                    logger.warn("Resource could not be closed", e);
                }
            }
        }
        
        return chunk;
    }
    
    public void saveModifiedChunks() {
        for(Chunk chunk : zone.getChunks()) {
            if(chunk.isModified()) {
                saveChunk(chunk);
            }
        }
    }
    
    public void saveChunk(Chunk chunk) {
        DataOutputStream dos = null;
        int index = zone.getChunkIndex(chunk.getX(), chunk.getY());
        
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream(allocSize);
            dos = new DataOutputStream(baos);
            dos.writeInt(chunk.getX());
            dos.writeInt(chunk.getY());
            dos.writeInt(chunk.getWidth());
            dos.writeInt(chunk.getHeight());
            
            for(Block block : chunk.getBlocks()) {
                dos.writeInt(block.getBase());
                dos.writeInt(block.getBack());
                dos.writeInt(block.getFront());
            }
            
            byte[] bytes = ZipUtils.deflateBytes(baos.toByteArray());
            file.seek(dataOffset + index * allocSize);
            file.writeShort(bytes.length);
            file.write(bytes);
            chunk.setModified(false);
        } catch(Exception e) {
            logger.error("Could not save chunk {} of zone {}", index, zone.getDocumentId(), e);
        } finally {
            if(dos != null) {
                try {
                    dos.close();
                } catch (IOException e) {
                    logger.warn("Resource could not be closed", e);
                }
            }
        }
    }
}
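
Taken together, a caller loads a chunk by its slot index, edits blocks through the Chunk API, and persists either that single chunk or every modified chunk in the zone. A minimal usage sketch follows, assuming an already-loaded Zone; the coordinates, block values, and the explicit setModified(true) call are assumptions for illustration, not taken from this diff.

// Minimal usage sketch (illustration only; values are made up).
void resaveChunkAtOrigin(Zone zone) {
    ChunkManager chunkManager = new ChunkManager(zone);

    int index = zone.getChunkIndex(0, 0);        // slot index for the chunk at (0, 0)
    Chunk chunk = chunkManager.loadChunk(index); // read and inflate that slot

    chunk.setBlock(0, new Block(0, 0, 0));       // hypothetical edit to the first block
    chunk.setModified(true);                     // assumption: mark the chunk as dirty

    chunkManager.saveChunk(chunk);               // write this chunk back into its slot
    chunkManager.saveModifiedChunks();           // or persist every modified chunk in the zone
}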
|
||||
|
|
Loading…
Add table
Reference in a new issue