This is the template file that should be copied to your ioBroker adapter repository as .github/copilot-instructions.md.
Prerequisites: Ensure you have GitHub Copilot already set up and working in your repository before using this template. If you need help with basic setup, see the Prerequisites & Setup Guide in the main repository.
- Copy this entire content
- Save it as `.github/copilot-instructions.md` in your adapter repository
- Customize the sections marked with `[CUSTOMIZE]` if needed
- Commit the file to enable GitHub Copilot integration
Note: If downloading via curl, use the sed command to remove the template comment block:
curl -o .github/copilot-instructions.md https://raw.githubusercontent.com/DrozmotiX/ioBroker-Copilot-Instructions/main/template.md
sed -i '/^<!--$/,/^-->$/d' .github/copilot-instructions.md

Version: 0.4.0
Template Source: https://github.com/DrozmotiX/ioBroker-Copilot-Instructions
This file contains instructions and best practices for GitHub Copilot when working on ioBroker adapter development.
You are working on an ioBroker adapter. ioBroker is an integration platform for the Internet of Things, focused on building smart home and industrial IoT solutions. Adapters are plugins that connect ioBroker to external systems, devices, or services.
[CUSTOMIZE: Add specific context about your adapter's purpose, target devices/services, and unique requirements]
- Use Jest as the primary testing framework for ioBroker adapters
- Create tests for all adapter main functions and helper methods
- Test error handling scenarios and edge cases
- Mock external API calls and hardware dependencies
- For adapters connecting to APIs/devices not reachable by internet, provide example data files to allow testing of functionality without live connections
- Example test structure:
describe('AdapterName', () => { let adapter; beforeEach(() => { // Setup test adapter instance }); test('should initialize correctly', () => { // Test adapter initialization }); });
IMPORTANT: Use the official @iobroker/testing framework for all integration tests. This is the ONLY correct way to test ioBroker adapters.
Official Documentation: https://github.com/ioBroker/testing
Integration tests MUST follow this exact pattern:
// Minimal @iobroker/testing integration test: configure the adapter through
// harness.objects, start it, then verify the info.connection state.
const path = require('path');
const { tests } = require('@iobroker/testing');
// Define test coordinates or configuration
const TEST_COORDINATES = '52.520008,13.404954'; // Berlin
// Use tests.integration() with defineAdditionalTests
tests.integration(path.join(__dirname, '..'), {
defineAdditionalTests({ suite }) {
suite('Test adapter with specific configuration', (getHarness) => {
let harness;
before(() => {
// The harness provides the isolated ioBroker test environment
harness = getHarness();
});
it('should configure and start adapter', () => new Promise(async (resolve) => {
// Get adapter object and configure
harness.objects.getObject('system.adapter.brightsky.0', async (err, obj) => {
if (err) {
console.error('Error getting adapter object:', err);
resolve();
return;
}
// Configure adapter properties
obj.native.position = TEST_COORDINATES;
obj.native.createCurrently = true;
obj.native.createHourly = true;
obj.native.createDaily = true;
// ... other configuration
// Set the updated configuration
harness.objects.setObject(obj._id, obj);
// Start adapter and wait
await harness.startAdapterAndWait();
// Wait for adapter to process data
setTimeout(() => {
// Verify states were created
harness.states.getState('brightsky.0.info.connection', (err, state) => {
if (state && state.val === true) {
console.log('✅ Adapter started successfully');
}
resolve();
});
}, 15000); // Allow time for API calls
});
})).timeout(30000);
});
}
});

IMPORTANT: For every "it works" test, implement corresponding "it doesn't work and fails" tests. This ensures proper error handling and validates that your adapter fails gracefully when expected.
// Paired test examples: every "happy path" test below has a matching
// failure-path test that checks the adapter degrades gracefully.
// Example: Testing successful configuration
it('should configure and start adapter with valid configuration', () => new Promise(async (resolve) => {
// ... successful configuration test as shown above
})).timeout(30000);
// Example: Testing failure scenarios
it('should fail gracefully with invalid configuration', () => new Promise(async (resolve) => {
harness.objects.getObject('system.adapter.brightsky.0', async (err, obj) => {
if (err) {
console.error('Error getting adapter object:', err);
resolve();
return;
}
// Configure with INVALID data to test failure handling
obj.native.position = 'invalid-coordinates'; // This should cause failure
obj.native.createCurrently = true;
harness.objects.setObject(obj._id, obj);
try {
await harness.startAdapterAndWait();
setTimeout(() => {
// Verify adapter handled the error properly
harness.states.getState('brightsky.0.info.connection', (err, state) => {
// connection === false means the adapter noticed the bad config
if (state && state.val === false) {
console.log('✅ Adapter properly failed with invalid configuration');
} else {
console.log('❌ Adapter should have failed but connection shows true');
}
resolve();
});
}, 15000);
} catch (error) {
// A thrown error during startup is also an acceptable failure mode
console.log('✅ Adapter correctly threw error with invalid configuration:', error.message);
resolve();
}
});
})).timeout(30000);
// Example: Testing missing required configuration
it('should fail when required configuration is missing', () => new Promise(async (resolve) => {
harness.objects.getObject('system.adapter.brightsky.0', async (err, obj) => {
if (err) {
console.error('Error getting adapter object:', err);
resolve();
return;
}
// Remove required configuration to test failure
delete obj.native.position; // This should cause failure
harness.objects.setObject(obj._id, obj);
try {
await harness.startAdapterAndWait();
setTimeout(() => {
harness.states.getState('brightsky.0.info.connection', (err, state) => {
// Missing state or connection === false both count as "failed properly"
if (!state || state.val === false) {
console.log('✅ Adapter properly failed with missing required configuration');
} else {
console.log('❌ Adapter should have failed but connection shows true');
}
resolve();
});
}, 10000);
} catch (error) {
console.log('✅ Adapter correctly threw error with missing configuration:', error.message);
resolve();
}
});
})).timeout(30000);

For testing adapters that create multiple states, use bulk state access methods to efficiently verify large numbers of states:
// Bulk verification example: fetch all state IDs by pattern, read them in
// one call, and reject (not just resolve) when expected states are missing.
it('should create and verify multiple states', () => new Promise(async (resolve, reject) => {
// Configure and start adapter first...
harness.objects.getObject('system.adapter.tagesschau.0', async (err, obj) => {
if (err) {
console.error('Error getting adapter object:', err);
reject(err);
return;
}
// Configure adapter as needed
obj.native.someConfig = 'test-value';
harness.objects.setObject(obj._id, obj);
await harness.startAdapterAndWait();
// Wait for adapter to create states
setTimeout(() => {
// Access bulk states using pattern matching
harness.dbConnection.getStateIDs('tagesschau.0.*').then(stateIds => {
if (stateIds && stateIds.length > 0) {
harness.states.getStates(stateIds, (err, allStates) => {
if (err) {
console.error('❌ Error getting states:', err);
reject(err); // Properly fail the test instead of just resolving
return;
}
// Verify states were created and have expected values
const expectedStates = ['tagesschau.0.info.connection', 'tagesschau.0.articles.0.title'];
let foundStates = 0;
for (const stateId of expectedStates) {
if (allStates[stateId]) {
foundStates++;
console.log(`✅ Found expected state: ${stateId}`);
} else {
console.log(`❌ Missing expected state: ${stateId}`);
}
}
if (foundStates === expectedStates.length) {
console.log('✅ All expected states were created successfully');
resolve();
} else {
reject(new Error(`Only ${foundStates}/${expectedStates.length} expected states were found`));
}
});
} else {
reject(new Error('No states found matching pattern tagesschau.0.*'));
}
}).catch(reject); // propagate getStateIDs failures to the test runner
}, 20000); // Allow more time for multiple state creation
});
})).timeout(45000);

- NEVER test API URLs directly - Let the adapter handle API calls
- ALWAYS use the harness - `getHarness()` provides the testing environment
- Configure via objects - Use `harness.objects.setObject()` to set adapter configuration
- Start properly - Use `harness.startAdapterAndWait()` to start the adapter
- Check states - Use `harness.states.getState()` to verify results
- Use timeouts - Allow time for async operations with appropriate timeouts
- Test real workflow - Initialize → Configure → Start → Verify States
Integration tests should run ONLY after lint and adapter tests pass:
integration-tests:
needs: [check-and-lint, adapter-tests]
runs-on: ubuntu-latest
steps:
- name: Run integration tests
run: npx mocha test/integration-*.js --exit

❌ Direct API testing: axios.get('https://api.example.com')
❌ Mock adapters: new MockAdapter()
❌ Direct internet calls in tests
❌ Bypassing the harness system
✅ Use @iobroker/testing framework
✅ Configure via harness.objects.setObject()
✅ Start via harness.startAdapterAndWait()
✅ Test complete adapter lifecycle
✅ Verify states via harness.states.getState()
✅ Allow proper timeouts for async operations
For adapters that connect to external APIs requiring authentication, implement comprehensive credential testing:
When creating integration tests that need encrypted passwords (like those marked as encryptedNative in io-package.json):
- Read system secret: Use `harness.objects.getObjectAsync("system.config")` to get `obj.native.secret`
- Apply XOR encryption: Implement the encryption algorithm:
async function encryptPassword(harness, password) { const systemConfig = await harness.objects.getObjectAsync("system.config"); if (!systemConfig || !systemConfig.native || !systemConfig.native.secret) { throw new Error("Could not retrieve system secret for password encryption"); } const secret = systemConfig.native.secret; let result = ''; for (let i = 0; i < password.length; ++i) { result += String.fromCharCode(secret[i % secret.length].charCodeAt(0) ^ password.charCodeAt(i)); } return result; }
- Store encrypted password: Set the encrypted result in adapter config, not the plain text
- Result: Adapter will properly decrypt and use credentials, enabling full API connectivity testing
- Use provider demo credentials when available (e.g., `demo@api-provider.com` / `demo`)
- Create separate test file (e.g., `test/integration-demo.js`) for credential-based tests
- Add npm script: `"test:integration-demo": "mocha test/integration-demo --exit"`
- Implement clear success/failure criteria with recognizable log messages
- Expected success pattern: Look for specific adapter initialization messages
- Test should fail clearly with actionable error messages for debugging
it("Should connect to API with demo credentials", async () => {
// ... setup and encryption logic ...
const connectionState = await harness.states.getStateAsync("adapter.0.info.connection");
if (connectionState && connectionState.val === true) {
console.log("✅ SUCCESS: API connection established");
return true;
} else {
throw new Error("API Test Failed: Expected API connection to be established with demo credentials. " +
"Check logs above for specific API errors (DNS resolution, 401 Unauthorized, network issues, etc.)");
}
}).timeout(120000); // Extended timeout for API calls

When updating README.md files, ensure these sections are present and well-documented:
- Installation - Clear npm/ioBroker admin installation steps
- Configuration - Detailed configuration options with examples
- Usage - Practical examples and use cases
- Changelog - Version history and changes (use "## WORK IN PROGRESS" section for ongoing changes following AlCalzone release-script standard)
- License - License information (typically MIT for ioBroker adapters)
- Support - Links to issues, discussions, and community support
- Use clear, concise language
- Include code examples for configuration
- Add screenshots for admin interface when applicable
- Maintain multilingual support (at minimum English and German)
- When creating PRs, add entries to README under "## WORK IN PROGRESS" section following ioBroker release script standard
- Always reference related issues in commits and PR descriptions (e.g., "solves #xx" or "fixes #xx")
For every PR or new feature, always add a user-friendly entry to README.md:
- Add entries under the `## **WORK IN PROGRESS**` section before committing
- Use format: `* (author) **TYPE**: Description of user-visible change`
- Types: NEW (features), FIXED (bugs), ENHANCED (improvements), TESTING (test additions), CI/CD (automation)
- Focus on user impact, not technical implementation details
- Example:
* (DutchmanNL) **FIXED**: Adapter now properly validates login credentials instead of always showing "credentials missing"
- Mandatory README updates: Establish requirement to update README.md for every PR/feature
- Standardized documentation: Create consistent format and categories for changelog entries
- Enhanced development workflow: Integrate documentation requirements into standard development process
Follow the AlCalzone release-script standard for changelog management:
- Always use `## **WORK IN PROGRESS**` as the placeholder for new changes
- Add all PR/commit changes under this section until ready for release
- Never modify version numbers manually - only when merging to main branch
- Maintain this format in README.md or CHANGELOG.md:
# Changelog
<!--
Placeholder for the next version (at the beginning of the line):
## **WORK IN PROGRESS**
-->
## **WORK IN PROGRESS**
- Did some changes
- Did some more changes
## v0.1.0 (2023-01-01)
Initial release

- During Development: All changes go under `## **WORK IN PROGRESS**`
- For Every PR: Add user-facing changes to the WORK IN PROGRESS section
- Before Merge: Version number and date are only added when merging to main
- Release Process: The release-script automatically converts the placeholder to the actual version
Use this consistent format for changelog entries:
- `(author) **TYPE**: User-friendly description of the change`
- Types: NEW (features), FIXED (bugs), ENHANCED (improvements)
- Focus on user impact, not technical implementation details
- Reference related issues: "fixes #XX" or "solves #XX"
## **WORK IN PROGRESS**
- (DutchmanNL) **FIXED**: Adapter now properly validates login credentials instead of always showing "credentials missing" (fixes #25)
- (DutchmanNL) **NEW**: Added support for device discovery to simplify initial setup

- Always use `npm` for dependency management in ioBroker adapters
- Keep dependencies minimal and focused
- Regularly update dependencies to latest stable versions
- Use `npm audit` to check for security vulnerabilities
- Before committing, ensure package.json and package-lock.json are in sync by running `npm install`
- Prefer built-in Node.js modules when possible
- Use `@iobroker/adapter-core` for adapter base functionality
- Avoid deprecated packages
- Document any specific version requirements
When creating admin configuration interfaces:
- Use JSON-Config format for modern ioBroker admin interfaces
- Provide clear labels and help text for all configuration options
- Include input validation and error messages
- Group related settings logically
- Example structure:
{ "type": "panel", "items": { "host": { "type": "text", "label": "Host address", "help": "IP address or hostname of the device" } } }
- Use consistent naming conventions
- Provide sensible default values
- Include validation for required fields
- Add tooltips for complex configuration options
- Ensure translations are available for all supported languages (minimum English and German)
- Write end-user friendly labels and descriptions, avoiding technical jargon where possible
- Preferred: Use the native `fetch` API (available in Node.js 18+)
- Alternative: Use `node-fetch` for older Node.js versions
- Avoid: `axios` unless specific features are required (reduces bundle size)
try {
// Native fetch (Node.js 18+); no third-party HTTP client needed
const response = await fetch('https://api.example.com/data');
if (!response.ok) {
// fetch() only rejects on network errors, so HTTP error statuses
// must be turned into exceptions explicitly
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
const data = await response.json();
} catch (error) {
// Log through the adapter's built-in logger, not console.*
this.log.error(`API request failed: ${error.message}`);
}

- Logging: Use adapter built-in logging (`this.log.*`)
- Scheduling: Use adapter built-in timers and intervals
- File operations: Use Node.js `fs/promises` for async file operations
- Configuration: Use adapter config system rather than external config libraries
- Always catch and log errors appropriately
- Use adapter log levels (error, warn, info, debug)
- Provide meaningful, user-friendly error messages that help users understand what went wrong
- Handle network failures gracefully
- Implement retry mechanisms where appropriate
- Always clean up timers, intervals, and other resources in the `unload()` method
try {
await this.connectToDevice();
} catch (error) {
this.log.error(`Failed to connect to device: ${error.message}`);
this.setState('info.connection', false, true);
// Implement retry logic if needed
}

// In your adapter class
private connectionTimer?: NodeJS.Timeout;
async onReady() {
this.connectionTimer = setInterval(() => {
this.checkConnection();
}, 30000);
}
onUnload(callback) {
try {
// Clean up timers and intervals
if (this.connectionTimer) {
clearInterval(this.connectionTimer);
this.connectionTimer = undefined;
}
// Close connections, clean up resources
callback();
} catch (e) {
callback();
}
}

- Follow JavaScript/TypeScript best practices
- Use async/await for asynchronous operations
- Implement proper resource cleanup in the `unload()` method
- Use semantic versioning for adapter releases
- Include proper JSDoc comments for public methods
For adapters with external API dependencies, implement separate CI/CD jobs:
# Tests API connectivity with demo credentials (runs separately)
demo-api-tests:
if: contains(github.event.head_commit.message, '[skip ci]') == false
runs-on: ubuntu-22.04
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Use Node.js 20.x
uses: actions/setup-node@v4
with:
node-version: 20.x
cache: 'npm'
- name: Install dependencies
run: npm ci
- name: Run demo API tests
run: npm run test:integration-demo

- Run credential tests separately from main test suite
- Use ubuntu-22.04 for consistency
- Don't make credential tests required for deployment
- Provide clear failure messages for API connectivity issues
- Use appropriate timeouts for external API calls (120+ seconds)
Add dedicated script for credential testing:
{
"scripts": {
"test:integration-demo": "mocha test/integration-demo --exit"
}
}

Here's a complete example based on lessons learned from the Discovergy adapter:
// Complete demo-credentials integration test: encrypt the password with the
// system secret, push it into the adapter config, start the adapter, and
// assert that info.connection becomes true.
const path = require("path");
const { tests } = require("@iobroker/testing");
// Helper function to encrypt password using ioBroker's encryption method
async function encryptPassword(harness, password) {
const systemConfig = await harness.objects.getObjectAsync("system.config");
if (!systemConfig || !systemConfig.native || !systemConfig.native.secret) {
throw new Error("Could not retrieve system secret for password encryption");
}
const secret = systemConfig.native.secret;
let result = '';
// XOR each password character with the (cyclically repeated) secret
for (let i = 0; i < password.length; ++i) {
result += String.fromCharCode(secret[i % secret.length].charCodeAt(0) ^ password.charCodeAt(i));
}
return result;
}
// Run integration tests with demo credentials
tests.integration(path.join(__dirname, ".."), {
defineAdditionalTests({ suite }) {
suite("API Testing with Demo Credentials", (getHarness) => {
let harness;
before(() => {
harness = getHarness();
});
it("Should connect to API and initialize with demo credentials", async () => {
console.log("Setting up demo credentials...");
// Ensure a clean start before reconfiguring
if (harness.isAdapterRunning()) {
await harness.stopAdapter();
}
// Store the ENCRYPTED password, never the plain text
const encryptedPassword = await encryptPassword(harness, "demo_password");
await harness.changeAdapterConfig("your-adapter", {
native: {
username: "demo@provider.com",
password: encryptedPassword,
// other config options
}
});
console.log("Starting adapter with demo credentials...");
await harness.startAdapter();
// Wait for API calls and initialization
await new Promise(resolve => setTimeout(resolve, 60000));
const connectionState = await harness.states.getStateAsync("your-adapter.0.info.connection");
if (connectionState && connectionState.val === true) {
console.log("✅ SUCCESS: API connection established");
return true;
} else {
// Fail with an actionable message so CI logs point at the real cause
throw new Error("API Test Failed: Expected API connection to be established with demo credentials. " +
"Check logs above for specific API errors (DNS resolution, 401 Unauthorized, network issues, etc.)");
}
}).timeout(120000);
});
}
});

[CUSTOMIZE: Add any adapter-specific coding standards or patterns here]