Skip to content

Commit 91cf55a

Browse files
Support for SLF4J and Java Util Logging (#355)
- The driver now supports both SLF4J and Java Util Logging (JUL). - The customer application can select the logging implementation by setting the system property `com.databricks.jdbc.loggerImpl`. For example: `java -jar application.jar -Dcom.databricks.jdbc.loggerImpl=JDKLOGGER` - If the property is set to `SLF4JLOGGER` (case-insensitive), the driver will use SLF4J for logging. - Note: The driver does not include SLF4J, SLF4J binding, or configuration files as dependencies. Therefore, if the customer application chooses SLF4J, it must provide the appropriate SLF4J, SLF4J binding, and configuration files in the classpath. - If the property is set to `JDKLOGGER` (case-insensitive), the driver will use Java Util Logging (JUL). - By default, the driver will choose Java Util Logging (JUL). - If the customer application has set the system property `java.util.logging.config.file` pointing to a JUL configuration file in the classpath, the driver’s JUL logger will inherit this configuration. - If the system property `java.util.logging.config.file` is not set, the driver’s JUL logger can be configured using JDBC URL parameters: `logPath` (case-insensitive), `logLevel` (case-insensitive), `logFileCount` (case-insensitive), and `logFileSize` (case-insensitive). By default, `logLevel` is set to `Level.OFF` and `logPath` is set to the present working directory. - The driver’s JUL will create log files with the pattern `databricks_jdbc.log.<file_index>` under the specified `logPath`. - Developers are discouraged from using the central logging utility class `com.databricks.jdbc.commons.util.LoggingUtil`, which will be deprecated. Instead, each class should instantiate a static logger instance using `JdbcLoggerFactory#getLogger(Class<?>)`. - Both logging implementations share a common interface `com.databricks.jdbc.log.JdbcLogger`. - The SLF4J dependency is marked as provided. The scope of SLF4J binding (log4j) is reduced to tests. 
- The `log4j2.xml` configuration file is removed from `src/main/resources`. A `logging.properties` configuration file is added in `src/test/resources` for local development and testing.
1 parent 3f6b842 commit 91cf55a

File tree

18 files changed

+960
-190
lines changed

18 files changed

+960
-190
lines changed

pom.xml

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -102,21 +102,25 @@
102102
<groupId>org.slf4j</groupId>
103103
<artifactId>slf4j-api</artifactId>
104104
<version>${slf4j.version}</version>
105+
<scope>provided</scope>
105106
</dependency>
106107
<dependency>
107108
<groupId>org.apache.logging.log4j</groupId>
108109
<artifactId>log4j-slf4j2-impl</artifactId>
109110
<version>${log4j.version}</version>
111+
<scope>test</scope>
110112
</dependency>
111113
<dependency>
112114
<groupId>org.apache.logging.log4j</groupId>
113115
<artifactId>log4j-core</artifactId>
114116
<version>${log4j.version}</version>
117+
<scope>test</scope>
115118
</dependency>
116119
<dependency>
117120
<groupId>org.apache.logging.log4j</groupId>
118121
<artifactId>log4j-api</artifactId>
119122
<version>${log4j.version}</version>
123+
<scope>test</scope>
120124
</dependency>
121125
<dependency>
122126
<groupId>commons-io</groupId>
@@ -247,6 +251,10 @@
247251
--add-opens=java.base/java.nio=ALL-UNNAMED
248252
-Dnet.bytebuddy.experimental=true
249253
</argLine>
254+
<systemPropertyVariables>
255+
<com.databricks.jdbc.loggerImpl>JDKLOGGER</com.databricks.jdbc.loggerImpl>
256+
<java.util.logging.config.file>${project.basedir}/src/test/resources/logging.properties</java.util.logging.config.file>
257+
</systemPropertyVariables>
250258
</configuration>
251259
</plugin>
252260
<plugin>
@@ -402,12 +410,12 @@
402410
<shadedPattern>com.databricks.internal.ini4j</shadedPattern>
403411
</relocation>
404412
<relocation>
405-
<pattern>org.osgi</pattern>
406-
<shadedPattern>com.databricks.internal.osgi</shadedPattern>
413+
<pattern>org.json</pattern>
414+
<shadedPattern>com.databricks.internal.json</shadedPattern>
407415
</relocation>
408416
<relocation>
409-
<pattern>org.slf4j</pattern>
410-
<shadedPattern>com.databricks.internal.slf4j</shadedPattern>
417+
<pattern>org.osgi</pattern>
418+
<shadedPattern>com.databricks.internal.osgi</shadedPattern>
411419
</relocation>
412420
</relocations>
413421
<filters>

src/main/java/com/databricks/client/jdbc/Driver.java

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import com.databricks.jdbc.driver.DatabricksJdbcConstants;
1616
import com.databricks.jdbc.driver.IDatabricksConnectionContext;
1717
import com.databricks.sdk.core.UserAgent;
18+
import java.io.IOException;
1819
import java.sql.*;
1920
import java.util.Properties;
2021

@@ -42,11 +43,15 @@ public boolean acceptsURL(String url) {
4243
@Override
4344
public Connection connect(String url, Properties info) throws DatabricksSQLException {
4445
IDatabricksConnectionContext connectionContext = DatabricksConnectionContext.parse(url, info);
45-
LoggingUtil.setupLogger(
46-
connectionContext.getLogPathString(),
47-
connectionContext.getLogFileSize(),
48-
connectionContext.getLogFileCount(),
49-
connectionContext.getLogLevel());
46+
try {
47+
LoggingUtil.setupLogger(
48+
connectionContext.getLogPathString(),
49+
connectionContext.getLogFileSize(),
50+
connectionContext.getLogFileCount(),
51+
connectionContext.getLogLevel());
52+
} catch (IOException e) {
53+
throw new DatabricksSQLException("Error initializing the Java Util Logger (JUL).", e);
54+
}
5055
setUserAgent(connectionContext);
5156
DeviceInfoLogUtil.logProperties(connectionContext);
5257
try {
Lines changed: 56 additions & 115 deletions
Original file line numberDiff line numberDiff line change
@@ -1,133 +1,74 @@
11
package com.databricks.jdbc.commons.util;
22

3-
import static com.databricks.jdbc.driver.DatabricksJdbcConstants.DEFAULT_FILE_LOG_PATTERN;
4-
import static com.databricks.jdbc.driver.DatabricksJdbcConstants.DEFAULT_LOG_NAME_FILE;
3+
import static com.databricks.jdbc.log.JulLogger.JAVA_UTIL_LOGGING_CONFIG_FILE;
54

65
import com.databricks.jdbc.commons.LogLevel;
7-
import java.io.File;
8-
import java.nio.file.Paths;
9-
import java.time.LocalDate;
10-
import org.apache.logging.log4j.Level;
11-
import org.apache.logging.log4j.LogManager;
12-
import org.apache.logging.log4j.Logger;
13-
import org.apache.logging.log4j.core.Appender;
14-
import org.apache.logging.log4j.core.LoggerContext;
15-
import org.apache.logging.log4j.core.appender.ConsoleAppender;
16-
import org.apache.logging.log4j.core.appender.FileAppender;
17-
import org.apache.logging.log4j.core.appender.RollingFileAppender;
18-
import org.apache.logging.log4j.core.appender.rolling.DefaultRolloverStrategy;
19-
import org.apache.logging.log4j.core.appender.rolling.SizeBasedTriggeringPolicy;
20-
import org.apache.logging.log4j.core.config.Configuration;
21-
import org.apache.logging.log4j.core.config.LoggerConfig;
22-
import org.apache.logging.log4j.core.layout.PatternLayout;
23-
6+
import com.databricks.jdbc.log.JdbcLogger;
7+
import com.databricks.jdbc.log.JdbcLoggerFactory;
8+
import com.databricks.jdbc.log.JulLogger;
9+
import java.io.IOException;
10+
import java.util.logging.Level;
11+
12+
/**
13+
* A centralised utility class for logging messages at different levels of importance.
14+
*
15+
* <p>TODO: switch to de-centralised logging with each class having its own logger.
16+
*/
2417
public class LoggingUtil {
25-
// TODO : make this thread safe.
26-
private static final String LOGGER_NAME = "databricks-jdbc";
27-
private static final PatternLayout LOG_LAYOUT =
28-
PatternLayout.newBuilder()
29-
.withPattern("%d{yyyy-MM-dd HH:mm:ss} %-5level %logger{36} - %msg%n")
30-
.build();
31-
private static final Logger LOGGER = LogManager.getLogger(LOGGER_NAME);
32-
;
33-
34-
public static void setupLogger(
35-
String filePath, int logFileSize, int logFileCount, LogLevel level) {
36-
LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
37-
Configuration config = ctx.getConfiguration();
38-
LoggerConfig loggerConfig = new LoggerConfig(LOGGER_NAME, levelConverter(level), false);
39-
boolean isFilePath = filePath.matches(".*\\.(log|txt|json|csv|xml|out)$");
4018

41-
if (isFilePath) {
42-
// If logDirectory is a single file, use that file without rolling
43-
setupFileAppender(config, filePath, loggerConfig, level);
44-
} else {
45-
// If logDirectory is a directory, create the directory if it doesn't exist
46-
File directory = new File(filePath);
47-
if (!directory.exists()) {
48-
directory.mkdirs();
49-
}
19+
private static final JdbcLogger LOGGER = JdbcLoggerFactory.getLogger(LoggingUtil.class);
5020

51-
// Use rolling files within that directory
52-
String fileName =
53-
Paths.get(filePath, LocalDate.now() + "-" + DEFAULT_LOG_NAME_FILE).toString();
54-
String filePattern = Paths.get(filePath, DEFAULT_FILE_LOG_PATTERN).toString();
55-
setupRollingFileAppender(
56-
config, fileName, filePattern, logFileSize, logFileCount, loggerConfig, level);
21+
public static void setupLogger(String logDir, int logFileSizeMB, int logFileCount, LogLevel level)
22+
throws IOException {
23+
if (LOGGER instanceof JulLogger && System.getProperty(JAVA_UTIL_LOGGING_CONFIG_FILE) == null) {
24+
// Only configure JUL logger if it's not already configured via external properties file
25+
JulLogger.initLogger(toJulLevel(level), logDir, logFileSizeMB * 1024 * 1024, logFileCount);
5726
}
58-
59-
// Add console appender
60-
Appender consoleAppender =
61-
ConsoleAppender.newBuilder()
62-
.setName("ConsoleAppender")
63-
.setLayout(LOG_LAYOUT)
64-
.setTarget(ConsoleAppender.Target.SYSTEM_OUT)
65-
.setConfiguration(config)
66-
.build();
67-
consoleAppender.start();
68-
loggerConfig.addAppender(consoleAppender, levelConverter(level), null);
69-
70-
config.addLogger(LOGGER_NAME, loggerConfig);
71-
ctx.updateLoggers();
72-
}
73-
74-
private static void setupFileAppender(
75-
Configuration config, String fileName, LoggerConfig loggerConfig, LogLevel level) {
76-
// Create a file appender without rolling
77-
Appender fileAppender =
78-
FileAppender.newBuilder()
79-
.withFileName(fileName)
80-
.withAppend(true)
81-
.withLayout(LOG_LAYOUT)
82-
.setConfiguration(config)
83-
.withName("FileAppender")
84-
.build();
85-
fileAppender.start();
86-
loggerConfig.addAppender(fileAppender, levelConverter(level), null);
87-
}
88-
89-
private static void setupRollingFileAppender(
90-
Configuration config,
91-
String fileName,
92-
String filePattern,
93-
int logFileSize,
94-
int logFileCount,
95-
LoggerConfig loggerConfig,
96-
LogLevel level) {
97-
98-
// Create a size-based triggering policy with the specified log file size
99-
SizeBasedTriggeringPolicy triggeringPolicy =
100-
SizeBasedTriggeringPolicy.createPolicy(logFileSize + "MB");
101-
102-
// Create a default rollover strategy with the specified maximum number of log files
103-
DefaultRolloverStrategy rolloverStrategy =
104-
DefaultRolloverStrategy.createStrategy(
105-
String.valueOf(logFileCount), "1", null, null, null, false, config);
106-
107-
// Create a rolling file appender with the triggering policy and rollover strategy
108-
Appender rollingFileAppender =
109-
RollingFileAppender.newBuilder()
110-
.withFileName(fileName)
111-
.withFilePattern(filePattern)
112-
.withLayout(LOG_LAYOUT)
113-
.withPolicy(triggeringPolicy)
114-
.withStrategy(rolloverStrategy)
115-
.setConfiguration(config)
116-
.withName("RollingFileAppender")
117-
.build();
118-
rollingFileAppender.start();
119-
loggerConfig.addAppender(rollingFileAppender, levelConverter(level), null);
12027
}
12128

12229
public static void log(LogLevel level, String message, String classContext) {
123-
log(level, String.format("%s- %s", classContext, message));
30+
log(level, String.format("%s - %s", classContext, message));
12431
}
12532

12633
public static void log(LogLevel level, String message) {
127-
LOGGER.log(levelConverter(level), message);
34+
switch (level) {
35+
case DEBUG:
36+
LOGGER.debug(message);
37+
break;
38+
case ERROR:
39+
case FATAL:
40+
LOGGER.error(message);
41+
break;
42+
case INFO:
43+
LOGGER.info(message);
44+
break;
45+
case TRACE:
46+
LOGGER.trace(message);
47+
break;
48+
case WARN:
49+
LOGGER.warn(message);
50+
break;
51+
default:
52+
LOGGER.error("Unrecognized log level: " + level + ". Message: " + message);
53+
}
12854
}
12955

130-
private static Level levelConverter(LogLevel level) {
131-
return Level.valueOf(level.toString());
56+
/** Converts a {@link LogLevel} to a {@link Level} for Java Util Logging. */
57+
private static Level toJulLevel(LogLevel level) {
58+
switch (level) {
59+
case DEBUG:
60+
return Level.FINE;
61+
case ERROR:
62+
case FATAL:
63+
return Level.SEVERE;
64+
case INFO:
65+
return Level.INFO;
66+
case TRACE:
67+
return Level.FINEST;
68+
case WARN:
69+
return Level.WARNING;
70+
default:
71+
return Level.OFF; // Silence is golden 💬✨
72+
}
13273
}
13374
}

src/main/java/com/databricks/jdbc/driver/DatabricksConnectionContext.java

Lines changed: 24 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ public static IDatabricksConnectionContext parse(String url, Properties properti
5050
String hostValue = hostAndPort[0];
5151
int portValue =
5252
hostAndPort.length == 2
53-
? Integer.valueOf(hostAndPort[1])
53+
? Integer.parseInt(hostAndPort[1])
5454
: DatabricksJdbcConstants.DEFAULT_PORT;
5555

5656
ImmutableMap.Builder<String, String> parametersBuilder = ImmutableMap.builder();
@@ -64,9 +64,9 @@ public static IDatabricksConnectionContext parse(String url, Properties properti
6464
if (pair.length == 1) {
6565
pair = new String[] {pair[0], ""};
6666
}
67-
if (pair[0].toLowerCase().equals(PORT)) {
67+
if (pair[0].equalsIgnoreCase(PORT)) {
6868
try {
69-
portValue = Integer.valueOf(pair[1]);
69+
portValue = Integer.parseInt(pair[1]);
7070
} catch (NumberFormatException e) {
7171
throw new DatabricksParsingException("Invalid port number " + pair[1]);
7272
}
@@ -103,10 +103,6 @@ public boolean equals(Object obj) {
103103
&& Objects.equals(parameters, that.parameters);
104104
}
105105

106-
private static void handleInvalidUrl(String url) throws DatabricksParsingException {
107-
throw new DatabricksParsingException("Invalid url incorrect: " + url);
108-
}
109-
110106
private DatabricksConnectionContext(
111107
String connectionURL,
112108
String host,
@@ -293,7 +289,9 @@ public AuthMech getAuthMech() {
293289
public LogLevel getLogLevel() {
294290
String logLevel = getParameter(DatabricksJdbcConstants.LOG_LEVEL);
295291
if (nullOrEmptyString(logLevel)) {
296-
LoggingUtil.log(LogLevel.DEBUG, "No logLevel given in the input, defaulting to info.");
292+
LoggingUtil.log(
293+
LogLevel.DEBUG,
294+
"Using default log level " + DEFAULT_LOG_LEVEL + " as none was provided.");
297295
return DEFAULT_LOG_LEVEL;
298296
}
299297
try {
@@ -306,15 +304,27 @@ public LogLevel getLogLevel() {
306304
try {
307305
return LogLevel.valueOf(logLevel);
308306
} catch (Exception e) {
309-
LoggingUtil.log(LogLevel.DEBUG, "Invalid logLevel given in the input, defaulting to info.");
307+
LoggingUtil.log(
308+
LogLevel.DEBUG,
309+
"Using default log level " + DEFAULT_LOG_LEVEL + " as invalid level was provided.");
310310
return DEFAULT_LOG_LEVEL;
311311
}
312312
}
313313

314314
@Override
315315
public String getLogPathString() {
316316
String parameter = getParameter(LOG_PATH);
317-
return (parameter == null) ? DEFAULT_LOG_PATH : parameter;
317+
if (parameter != null) {
318+
return parameter;
319+
}
320+
321+
String userDir = System.getProperty("user.dir");
322+
if (userDir != null && !userDir.isEmpty()) {
323+
return userDir;
324+
}
325+
326+
// Fallback option if both LOG_PATH and user.dir are unavailable
327+
return System.getProperty("java.io.tmpdir", ".");
318328
}
319329

320330
@Override
@@ -508,7 +518,9 @@ static LogLevel getLogLevel(int level) {
508518
case 6:
509519
return LogLevel.TRACE;
510520
default:
511-
LoggingUtil.log(LogLevel.INFO, "Invalid logLevel, defaulting to default log level.");
521+
LoggingUtil.log(
522+
LogLevel.INFO,
523+
"Using default log level " + DEFAULT_LOG_LEVEL + " as invalid level was provided.");
512524
return DEFAULT_LOG_LEVEL;
513525
}
514526
}
@@ -548,7 +560,7 @@ public boolean supportManyParameters() {
548560
}
549561

550562
/** Returns whether the current test is a fake service test. */
551-
// TODO: (Bhuvan) This is a temporary solution to enable fake service tests by disabling flushing
563+
// TODO: This is a temporary solution to enable fake service tests by disabling flushing
552564
// of metrics when session is closed. We should remove this
553565
@Override
554566
public boolean isFakeServiceTest() {

src/main/java/com/databricks/jdbc/driver/DatabricksJdbcConstants.java

Lines changed: 1 addition & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -27,25 +27,16 @@ public final class DatabricksJdbcConstants {
2727

2828
public static final String JDBC_SCHEMA = "jdbc:databricks://";
2929

30-
public static final LogLevel DEFAULT_LOG_LEVEL = LogLevel.INFO;
30+
public static final LogLevel DEFAULT_LOG_LEVEL = LogLevel.OFF;
3131

3232
public static final String LOG_LEVEL = "loglevel";
3333

3434
public static final String LOG_PATH = "logpath";
3535

36-
public static final String DEFAULT_LOG_PATH =
37-
System.getProperty("java.io.tmpdir") + "/logs/application.log";
38-
3936
public static final String LOG_FILE_SIZE = "LogFileSize";
4037

4138
public static final int DEFAULT_LOG_FILE_SIZE_IN_MB = 10;
4239

43-
public static final String DEFAULT_LOG_PATTERN = "%d{yyyy-MM-dd HH:mm:ss} %p %c{1}:%L - %m%n";
44-
45-
public static final String DEFAULT_FILE_LOG_PATTERN = "/%d{yyyy-MM-dd}-logfile-%i.log";
46-
47-
public static final String DEFAULT_LOG_NAME_FILE = "logfile-0.log";
48-
4940
public static final String LOG_FILE_COUNT = "LogFileCount";
5041

5142
public static final int DEFAULT_LOG_FILE_COUNT = 10;

0 commit comments

Comments
 (0)